HDDS-2089: Add createPipeline CLI. (#1418)
(cherry picked from commit 326b5acd4a63fe46821919322867f5daff30750c)
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/SCMAction.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/SCMAction.java
index d03ad15..f8b1bbf 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/SCMAction.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/SCMAction.java
@@ -31,6 +31,7 @@
GET_CONTAINER,
GET_CONTAINER_WITH_PIPELINE,
LIST_CONTAINER,
+ CREATE_PIPELINE,
LIST_PIPELINE,
CLOSE_PIPELINE,
ACTIVATE_PIPELINE,
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java
index 99c9e8d..092aba3 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java
@@ -242,8 +242,14 @@
public PipelineResponseProto allocatePipeline(
RpcController controller, PipelineRequestProto request)
throws ServiceException {
- // TODO : Wiring this up requires one more patch.
- return null;
+ try (Scope scope = TracingUtil
+ .importAndCreateScope("createPipeline", request.getTraceID())) {
+ impl.createReplicationPipeline(request.getReplicationType(),
+ request.getReplicationFactor(), request.getNodePool());
+ return PipelineResponseProto.newBuilder().build();
+ } catch (IOException e) {
+ throw new ServiceException(e);
+ }
}
@Override
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SimplePipelineProvider.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SimplePipelineProvider.java
index ab98dfa..54e2141 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SimplePipelineProvider.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SimplePipelineProvider.java
@@ -48,7 +48,7 @@
String e = String
.format("Cannot create pipeline of factor %d using %d nodes.",
factor.getNumber(), dns.size());
- throw new IOException(e);
+ throw new InsufficientDatanodesException(e);
}
Collections.shuffle(dns);
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
index 7d9cb3e..7708bed 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
@@ -390,10 +390,10 @@
public Pipeline createReplicationPipeline(HddsProtos.ReplicationType type,
HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool)
throws IOException {
- // TODO: will be addressed in future patch.
- // This is needed only for debugging purposes to make sure cluster is
- // working correctly.
- return null;
+ Pipeline result = scm.getPipelineManager().createPipeline(type, factor);
+ AUDIT.logWriteSuccess(
+ buildAuditMessageForSuccess(SCMAction.CREATE_PIPELINE, null));
+ return result;
}
@Override
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java
index 1b95418..1246faef 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java
@@ -35,6 +35,7 @@
import org.apache.hadoop.hdds.scm.cli.pipeline.ActivatePipelineSubcommand;
import org.apache.hadoop.hdds.scm.cli.pipeline.ClosePipelineSubcommand;
+import org.apache.hadoop.hdds.scm.cli.pipeline.CreatePipelineSubcommand;
import org.apache.hadoop.hdds.scm.cli.pipeline.DeactivatePipelineSubcommand;
import org.apache.hadoop.hdds.scm.cli.pipeline.ListPipelinesSubcommand;
import org.apache.hadoop.hdds.scm.client.ContainerOperationClient;
import org.apache.hadoop.hdds.scm.client.ScmClient;
@@ -85,6 +86,7 @@
DeleteSubcommand.class,
CreateSubcommand.class,
CloseSubcommand.class,
+ CreatePipelineSubcommand.class,
ListPipelinesSubcommand.class,
ActivatePipelineSubcommand.class,
DeactivatePipelineSubcommand.class,
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java
new file mode 100644
index 0000000..edeb786
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.cli.pipeline;
+
+import org.apache.hadoop.hdds.cli.HddsVersionProvider;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.cli.SCMCLI;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import picocli.CommandLine;
+
+import java.util.concurrent.Callable;
+
+/**
+ * Handler of createPipeline command.
+ */
+@CommandLine.Command(
+    name = "createPipeline",
+    description = "create pipeline",
+    mixinStandardHelpOptions = true,
+    versionProvider = HddsVersionProvider.class)
+public class CreatePipelineSubcommand implements Callable<Void> {
+  @CommandLine.ParentCommand
+  private SCMCLI parent;
+
+  @CommandLine.Option(
+      names = {"-t", "--replicationType"},
+      description = "Replication type (STAND_ALONE, RATIS)",
+      defaultValue = "STAND_ALONE"
+  )
+  private HddsProtos.ReplicationType type
+      = HddsProtos.ReplicationType.STAND_ALONE;
+
+  @CommandLine.Option(
+      names = {"-f", "--replicationFactor"},
+      description = "Replication factor (ONE, THREE)",
+      defaultValue = "ONE"
+  )
+  private HddsProtos.ReplicationFactor factor
+      = HddsProtos.ReplicationFactor.ONE;
+
+  /**
+   * Asks SCM to create a pipeline of the configured type and factor.
+   *
+   * @return always null; the command works through side effects only
+   * @throws Exception if the SCM client cannot be created or the call fails
+   */
+  @Override
+  public Void call() throws Exception {
+    // CHAINED exists in the proto enum but has no server-side support yet.
+    if (type == HddsProtos.ReplicationType.CHAINED) {
+      throw new IllegalArgumentException(type.name()
+          + " is not supported yet.");
+    }
+    try (ScmClient scmClient = parent.createScmClient()) {
+      scmClient.createReplicationPipeline(
+          type,
+          factor,
+          HddsProtos.NodePool.getDefaultInstance());
+      return null;
+    }
+  }
+}