HDDS-4872. Upgrade UsageInfoSubcommand with options to show most and least used datanodes. (#1982)
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
index abb0be2..8ab2ba4 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
@@ -301,11 +301,24 @@
*
* @param ipaddress datanode ipaddress String
* @param uuid datanode uuid String
- * @return List of DatanodeUsageInfo. Each element contains info such as
+ * @return List of DatanodeUsageInfoProto. Each element contains info such as
* capacity, SCMused, and remaining space.
* @throws IOException
*/
- List<HddsProtos.DatanodeUsageInfo> getDatanodeUsageInfo(String ipaddress,
- String uuid)
+ List<HddsProtos.DatanodeUsageInfoProto> getDatanodeUsageInfo(String ipaddress,
+ String uuid)
throws IOException;
+
+ /**
+ * Get usage information of most or least used datanodes.
+ *
+ * @param mostUsed true if most used, false if least used
+ * @param count Integer number of nodes to get info for
+ * @return List of DatanodeUsageInfoProto. Each element contains info such as
+ * capacity, SCMUsed, and remaining space.
+ * @throws IOException
+ */
+ List<HddsProtos.DatanodeUsageInfoProto> getDatanodeUsageInfo(
+ boolean mostUsed, int count) throws IOException;
+
}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
index aa14a27..3666e22 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
@@ -262,13 +262,24 @@
/**
* Get Datanode usage information by ip or uuid.
*
- * @param ipaddress - datanode IP address String
- * @param uuid - datanode UUID String
- * @return List of DatanodeUsageInfo. Each element contains info such as
+ * @param ipaddress datanode IP address String
+ * @param uuid datanode UUID String
+ * @return List of DatanodeUsageInfoProto. Each element contains info such as
* capacity, SCMused, and remaining space.
* @throws IOException
*/
- List<HddsProtos.DatanodeUsageInfo> getDatanodeUsageInfo(String ipaddress,
- String uuid)
- throws IOException;
+ List<HddsProtos.DatanodeUsageInfoProto> getDatanodeUsageInfo(
+ String ipaddress, String uuid) throws IOException;
+
+ /**
+ * Get usage information of most or least used datanodes.
+ *
+ * @param mostUsed true if most used, false if least used
+ * @param count Integer number of nodes to get info for
+ * @return List of DatanodeUsageInfoProto. Each element contains info such as
+ * capacity, SCMUsed, and remaining space.
+ * @throws IOException
+ */
+ List<HddsProtos.DatanodeUsageInfoProto> getDatanodeUsageInfo(
+ boolean mostUsed, int count) throws IOException;
}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
index 24fdf0d..63a88bd 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
@@ -623,14 +623,14 @@
/**
* Builds request for datanode usage information and receives response.
*
- * @param ipaddress - Address String
- * @param uuid - UUID String
- * @return List of DatanodeUsageInfo. Each element contains info such as
+ * @param ipaddress Address String
+ * @param uuid UUID String
+ * @return List of DatanodeUsageInfoProto. Each element contains info such as
* capacity, SCMUsed, and remaining space.
* @throws IOException
*/
@Override
- public List<HddsProtos.DatanodeUsageInfo> getDatanodeUsageInfo(
+ public List<HddsProtos.DatanodeUsageInfoProto> getDatanodeUsageInfo(
String ipaddress, String uuid) throws IOException {
DatanodeUsageInfoRequestProto request =
@@ -646,6 +646,32 @@
return response.getInfoList();
}
+ /**
+ * Get usage information of most or least used datanodes.
+ *
+ * @param mostUsed true if most used, false if least used
+ * @param count Integer number of nodes to get info for
+ * @return List of DatanodeUsageInfoProto. Each element contains info such as
+ * capacity, SCMUsed, and remaining space.
+ * @throws IOException
+ */
+ @Override
+ public List<HddsProtos.DatanodeUsageInfoProto> getDatanodeUsageInfo(
+ boolean mostUsed, int count) throws IOException {
+ DatanodeUsageInfoRequestProto request =
+ DatanodeUsageInfoRequestProto.newBuilder()
+ .setMostUsed(mostUsed)
+ .setCount(count)
+ .build();
+
+ DatanodeUsageInfoResponseProto response =
+ submitRequest(Type.DatanodeUsageInfo,
+ builder -> builder.setDatanodeUsageInfoRequest(request))
+ .getDatanodeUsageInfoResponse();
+
+ return response.getInfoList();
+ }
+
@Override
public Object getUnderlyingProxyObject() {
return rpcProxy;
diff --git a/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto b/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto
index 7330f2e..66b6d6f 100644
--- a/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto
+++ b/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto
@@ -255,15 +255,17 @@
}
/*
- Usage info request message containing ip and uuid.
+ Datanode usage info request message.
*/
message DatanodeUsageInfoRequestProto {
optional string ipaddress = 1;
optional string uuid = 2;
+ optional bool mostUsed = 3;
+ optional uint32 count = 4;
}
message DatanodeUsageInfoResponseProto {
- repeated DatanodeUsageInfo info = 1;
+ repeated DatanodeUsageInfoProto info = 1;
}
/*
diff --git a/hadoop-hdds/interface-client/src/main/proto/hdds.proto b/hadoop-hdds/interface-client/src/main/proto/hdds.proto
index 8250449..b43600c 100644
--- a/hadoop-hdds/interface-client/src/main/proto/hdds.proto
+++ b/hadoop-hdds/interface-client/src/main/proto/hdds.proto
@@ -156,10 +156,11 @@
repeated Node nodes = 1;
}
-message DatanodeUsageInfo {
+message DatanodeUsageInfoProto {
optional int64 capacity = 1;
optional int64 used = 2;
optional int64 remaining = 3;
+ optional DatanodeDetailsProto node = 4;
}
/**
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeStat.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeStat.java
index 962bbb4..270df05 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeStat.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeStat.java
@@ -138,4 +138,27 @@
public int hashCode() {
return Long.hashCode(capacity.get() ^ scmUsed.get() ^ remaining.get());
}
+
+ /**
+ * Compares this SCMNodeStat with other on the basis of remaining to
+ * capacity ratio.
+ *
+ * @param other The SCMNodeStat object to compare with this object.
+ * @return A value greater than 0 if this has lesser remaining ratio than the
+ * specified other, a value lesser than 0 if this has greater remaining ratio
+ * than the specified other, and 0 if remaining ratios are equal.
+ */
+ public int compareByRemainingRatio(SCMNodeStat other) {
+ Preconditions.checkNotNull(other, "Argument cannot be null");
+
+ // if capacity is zero, replace with 1 for division to work
+ double thisCapacity = Math.max(this.getCapacity().get().doubleValue(), 1d);
+ double otherCapacity = Math.max(
+ other.getCapacity().get().doubleValue(), 1d);
+
+ double thisRemainingRatio = this.getRemaining().get() / thisCapacity;
+ double otherRemainingRatio = other.getRemaining().get() / otherCapacity;
+
+ return Double.compare(otherRemainingRatio, thisRemainingRatio);
+ }
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeUsageInfo.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeUsageInfo.java
new file mode 100644
index 0000000..a88b2b9
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeUsageInfo.java
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.node;
+
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat;
+
+import java.util.Comparator;
+
+public class DatanodeUsageInfo {
+
+ private DatanodeDetails datanodeDetails;
+ private SCMNodeStat scmNodeStat;
+
+ /**
+ * Constructs a DatanodeUsageInfo with DatanodeDetails and SCMNodeStat.
+ *
+ * @param datanodeDetails DatanodeDetails
+ * @param scmNodeStat SCMNodeStat
+ */
+ public DatanodeUsageInfo(
+ DatanodeDetails datanodeDetails,
+ SCMNodeStat scmNodeStat) {
+ this.datanodeDetails = datanodeDetails;
+ this.scmNodeStat = scmNodeStat;
+ }
+
+ /**
+ * Compares two DatanodeUsageInfo on the basis of remaining space to capacity
+ * ratio.
+ *
+ * @param first DatanodeUsageInfo
+ * @param second DatanodeUsageInfo
+ * @return a value greater than 0 if second has higher remaining to
+ * capacity ratio, a value lesser than 0 if first has higher remaining to
+ * capacity ratio, and 0 if both have equal ratios or first.equals(second)
+ * is true
+ */
+ private static int compareByRemainingRatio(DatanodeUsageInfo first,
+ DatanodeUsageInfo second) {
+ if (first.equals(second)) {
+ return 0;
+ }
+ return first.getScmNodeStat()
+ .compareByRemainingRatio(second.getScmNodeStat());
+ }
+
+ /**
+ * Sets DatanodeDetails of this DatanodeUsageInfo.
+ *
+ * @param datanodeDetails the DatanodeDetails to use
+ */
+ public void setDatanodeDetails(
+ DatanodeDetails datanodeDetails) {
+ this.datanodeDetails = datanodeDetails;
+ }
+
+ /**
+ * Sets SCMNodeStat of this DatanodeUsageInfo.
+ *
+ * @param scmNodeStat the SCMNodeStat to use.
+ */
+ public void setScmNodeStat(
+ SCMNodeStat scmNodeStat) {
+ this.scmNodeStat = scmNodeStat;
+ }
+
+ /**
+ * Gets DatanodeDetails of this DatanodeUsageInfo.
+ *
+ * @return DatanodeDetails
+ */
+ public DatanodeDetails getDatanodeDetails() {
+ return datanodeDetails;
+ }
+
+ /**
+ * Gets SCMNodeStat of this DatanodeUsageInfo.
+ *
+ * @return SCMNodeStat
+ */
+ public SCMNodeStat getScmNodeStat() {
+ return scmNodeStat;
+ }
+
+ /**
+ * Gets Comparator that compares two DatanodeUsageInfo on the basis of
+ * remaining space to capacity ratio.
+ *
+ * @return Comparator to compare two DatanodeUsageInfo. The comparison
+ * function returns a value greater than 0 if second DatanodeUsageInfo has
+ * greater remaining space to capacity ratio, a value lesser than 0 if
+ * first DatanodeUsageInfo has greater remaining space to capacity ratio,
+ * and 0 if both have equal ratios or first.equals(second) is true
+ */
+ public static Comparator<DatanodeUsageInfo> getMostUsedByRemainingRatio() {
+ return DatanodeUsageInfo::compareByRemainingRatio;
+ }
+
+ /**
+ * Checks if the specified Object o is equal to this DatanodeUsageInfo.
+ *
+ * @param o Object to check
+ * @return true if both refer to the same object or if both have the same
+ * DatanodeDetails, false otherwise
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ DatanodeUsageInfo that = (DatanodeUsageInfo) o;
+ return datanodeDetails.equals(that.datanodeDetails);
+ }
+
+ @Override
+ public int hashCode() {
+ return datanodeDetails.hashCode();
+ }
+
+ /**
+ * Converts an object of type DatanodeUsageInfo to Protobuf type
+ * HddsProtos.DatanodeUsageInfoProto.
+ *
+   * @return Protobuf HddsProtos.DatanodeUsageInfoProto
+ */
+ public HddsProtos.DatanodeUsageInfoProto toProto() {
+ return toProtoBuilder().build();
+ }
+
+ private HddsProtos.DatanodeUsageInfoProto.Builder toProtoBuilder() {
+ HddsProtos.DatanodeUsageInfoProto.Builder builder =
+ HddsProtos.DatanodeUsageInfoProto.newBuilder();
+
+ if (datanodeDetails != null) {
+ builder.setNode(
+ datanodeDetails.toProto(datanodeDetails.getCurrentVersion()));
+ }
+ if (scmNodeStat != null) {
+ builder.setCapacity(scmNodeStat.getCapacity().get());
+ builder.setUsed(scmNodeStat.getScmUsed().get());
+ builder.setRemaining(scmNodeStat.getRemaining().get());
+ }
+ return builder;
+ }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java
index 22a1e81..9f4ac2e 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java
@@ -17,17 +17,17 @@
*/
package org.apache.hadoop.hdds.scm.node;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.NodeReportProto;
import org.apache.hadoop.hdds.scm.container.ContainerID;
-import org.apache.hadoop.hdds.scm.net.NetworkTopology;
-import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
-import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric;
import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat;
+import org.apache.hadoop.hdds.scm.net.NetworkTopology;
import org.apache.hadoop.hdds.scm.node.states.NodeNotFoundException;
-import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.hadoop.hdds.server.events.EventHandler;
import org.apache.hadoop.ozone.protocol.StorageContainerNodeProtocol;
import org.apache.hadoop.ozone.protocol.commands.CommandForDatanode;
@@ -118,6 +118,17 @@
Map<DatanodeDetails, SCMNodeStat> getNodeStats();
/**
+ * Gets a sorted list of most or least used DatanodeUsageInfo containing
+ * healthy, in-service nodes. If the specified mostUsed is true, the returned
+ * list is in descending order of usage. Otherwise, the returned list is in
+ * ascending order of usage.
+ *
+ * @param mostUsed true if most used, false if least used
+ * @return List of DatanodeUsageInfo
+ */
+ List<DatanodeUsageInfo> getMostOrLeastUsedDatanodes(boolean mostUsed);
+
+ /**
* Return the node stat of the specified datanode.
* @param datanodeDetails DatanodeDetails.
* @return node stat if it is live/stale, null if it is decommissioned or
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java
index c03990f..b5ecaac 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java
@@ -17,26 +17,15 @@
*/
package org.apache.hadoop.hdds.scm.node;
-import javax.management.ObjectName;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.Collections;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ScheduledFuture;
-import java.util.stream.Collectors;
-
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.apache.hadoop.hdds.DFSConfigKeysLegacy;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.NodeReportProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.PipelineReportsProto;
@@ -67,15 +56,25 @@
import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
import org.apache.hadoop.ozone.protocol.commands.SetNodeOperationalStateCommand;
import org.apache.hadoop.util.ReflectionUtils;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
import org.apache.hadoop.util.Time;
-import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.management.ObjectName;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ScheduledFuture;
+import java.util.stream.Collectors;
+
/**
* Maintains information about the Datanodes on SCM side.
* <p>
@@ -532,6 +531,41 @@
}
/**
+ * Gets a sorted list of most or least used DatanodeUsageInfo containing
+ * healthy, in-service nodes. If the specified mostUsed is true, the returned
+ * list is in descending order of usage. Otherwise, the returned list is in
+ * ascending order of usage.
+ *
+ * @param mostUsed true if most used, false if least used
+ * @return List of DatanodeUsageInfo
+ */
+ public List<DatanodeUsageInfo> getMostOrLeastUsedDatanodes(
+ boolean mostUsed) {
+ List<DatanodeDetails> healthyNodes =
+ getNodes(NodeOperationalState.IN_SERVICE, NodeState.HEALTHY);
+
+ List<DatanodeUsageInfo> datanodeUsageInfoList =
+ new ArrayList<>(healthyNodes.size());
+
+ // create a DatanodeUsageInfo from each DatanodeDetails and add it to the
+ // list
+ for (DatanodeDetails node : healthyNodes) {
+ SCMNodeStat stat = getNodeStatInternal(node);
+ datanodeUsageInfoList.add(new DatanodeUsageInfo(node, stat));
+ }
+
+ // sort the list according to appropriate comparator
+ if (mostUsed) {
+ datanodeUsageInfoList.sort(
+ DatanodeUsageInfo.getMostUsedByRemainingRatio().reversed());
+ } else {
+ datanodeUsageInfoList.sort(
+ DatanodeUsageInfo.getMostUsedByRemainingRatio());
+ }
+ return datanodeUsageInfoList;
+ }
+
+ /**
* Return the node stat of the specified datanode.
*
* @param datanodeDetails - datanode ID.
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java
index af8db21..ab110c3 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hdds.scm.protocol;
-import com.google.common.base.Strings;
import com.google.protobuf.ProtocolMessageEnum;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
@@ -572,21 +571,17 @@
public DatanodeUsageInfoResponseProto getDatanodeUsageInfo(
StorageContainerLocationProtocolProtos.DatanodeUsageInfoRequestProto
request) throws IOException {
- String ipaddress = null;
- String uuid = null;
- if (request.hasIpaddress()) {
- ipaddress = request.getIpaddress();
- }
- if (request.hasUuid()) {
- uuid = request.getUuid();
- }
- if (Strings.isNullOrEmpty(ipaddress) && Strings.isNullOrEmpty(uuid)) {
- throw new IOException("No ip or uuid specified");
+ List<HddsProtos.DatanodeUsageInfoProto> infoList;
+
+ // get info by ip or uuid
+ if (request.hasUuid() || request.hasIpaddress()) {
+ infoList = impl.getDatanodeUsageInfo(request.getIpaddress(),
+ request.getUuid());
+ } else { // get most or least used nodes
+ infoList = impl.getDatanodeUsageInfo(request.getMostUsed(),
+ request.getCount());
}
- List<HddsProtos.DatanodeUsageInfo> infoList;
- infoList = impl.getDatanodeUsageInfo(ipaddress,
- uuid);
return DatanodeUsageInfoResponseProto.newBuilder()
.addAllInfo(infoList)
.build();
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
index b8f7fbc..315826c 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
@@ -43,6 +43,7 @@
import org.apache.hadoop.hdds.scm.events.SCMEvents;
import org.apache.hadoop.hdds.scm.exceptions.SCMException;
import org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes;
+import org.apache.hadoop.hdds.scm.node.DatanodeUsageInfo;
import org.apache.hadoop.hdds.scm.node.NodeStatus;
import org.apache.hadoop.hdds.scm.node.states.NodeNotFoundException;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
@@ -648,16 +649,17 @@
}
/**
- * Get Datanode usage info (such as capacity, used) by ip or uuid.
+ * Get Datanode usage info such as capacity, SCMUsed, and remaining by ip
+ * or uuid.
*
- * @param ipaddress - Datanode Address String
- * @param uuid - Datanode UUID String
- * @return List of DatanodeUsageInfo. Each element contains usage info such
- * as capacity, SCMUsed, and remaining space.
- * @throws IOException
+ * @param ipaddress Datanode Address String
+ * @param uuid Datanode UUID String
+ * @return List of DatanodeUsageInfoProto. Each element contains usage info
+ * such as capacity, SCMUsed, and remaining space.
+ * @throws IOException if admin authentication fails
*/
@Override
- public List<HddsProtos.DatanodeUsageInfo> getDatanodeUsageInfo(
+ public List<HddsProtos.DatanodeUsageInfoProto> getDatanodeUsageInfo(
String ipaddress, String uuid) throws IOException {
// check admin authorisation
@@ -682,7 +684,7 @@
}
// get datanode usage info
- List<HddsProtos.DatanodeUsageInfo> infoList = new ArrayList<>();
+ List<HddsProtos.DatanodeUsageInfoProto> infoList = new ArrayList<>();
for (DatanodeDetails node : nodes) {
infoList.add(getUsageInfoFromDatanodeDetails(node));
}
@@ -693,25 +695,69 @@
/**
* Get usage details for a specific DatanodeDetails node.
*
- * @param node - DatanodeDetails
+ * @param node DatanodeDetails
* @return Usage info such as capacity, SCMUsed, and remaining space.
- * @throws IOException
*/
- private HddsProtos.DatanodeUsageInfo getUsageInfoFromDatanodeDetails(
- DatanodeDetails node) throws IOException {
+ private HddsProtos.DatanodeUsageInfoProto getUsageInfoFromDatanodeDetails(
+ DatanodeDetails node) {
SCMNodeStat stat = scm.getScmNodeManager().getNodeStat(node).get();
long capacity = stat.getCapacity().get();
long used = stat.getScmUsed().get();
long remaining = stat.getRemaining().get();
- HddsProtos.DatanodeUsageInfo info = HddsProtos.DatanodeUsageInfo
- .newBuilder()
+ return HddsProtos.DatanodeUsageInfoProto.newBuilder()
.setCapacity(capacity)
.setUsed(used)
.setRemaining(remaining)
+ .setNode(node.toProto(node.getCurrentVersion()))
.build();
- return info;
+ }
+
+ /**
+ * Get a sorted list of most or least used DatanodeUsageInfo containing
+ * healthy, in-service nodes.
+ *
+ * @param mostUsed true if most used, false if least used
+ * @param count number of nodes to get; must be an integer greater than zero
+ * @return List of DatanodeUsageInfoProto. Each element contains usage info
+ * such as capacity, SCMUsed, and remaining space.
+ * @throws IOException if admin authentication fails
+ * @throws IllegalArgumentException if count is not an integer greater than
+ * zero
+ */
+ @Override
+ public List<HddsProtos.DatanodeUsageInfoProto> getDatanodeUsageInfo(
+ boolean mostUsed, int count) throws IOException, IllegalArgumentException{
+
+ // check admin authorisation
+ String remoteUser = getRpcRemoteUsername();
+ try {
+ getScm().checkAdminAccess(remoteUser);
+ } catch (IOException e) {
+ LOG.error("Authorisation failed", e);
+ throw e;
+ }
+
+ if (count < 1) {
+ throw new IllegalArgumentException("The specified parameter count must " +
+ "be an integer greater than zero.");
+ }
+
+ List<DatanodeUsageInfo> datanodeUsageInfoList =
+ scm.getScmNodeManager().getMostOrLeastUsedDatanodes(mostUsed);
+
+ // if count is greater than the size of list containing healthy,
+ // in-service nodes, just set count to that size
+ if (count > datanodeUsageInfoList.size()) {
+ count = datanodeUsageInfoList.size();
+ }
+
+ // return count number of DatanodeUsageInfoProto
+ return datanodeUsageInfoList.stream()
+ .map(DatanodeUsageInfo::toProto)
+ .limit(count)
+ .collect(Collectors.toList());
}
/**
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
index ddd4aef..bb49016 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
@@ -24,6 +24,7 @@
import org.apache.hadoop.hdds.scm.net.NetworkTopology;
import org.apache.hadoop.hdds.scm.net.NetworkTopologyImpl;
import org.apache.hadoop.hdds.scm.net.Node;
+import org.apache.hadoop.hdds.scm.node.DatanodeUsageInfo;
import org.apache.hadoop.hdds.scm.node.NodeStatus;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
@@ -268,6 +269,21 @@
}
/**
+ * Gets a sorted list of most or least used DatanodeUsageInfo containing
+ * healthy, in-service nodes. If the specified mostUsed is true, the returned
+ * list is in descending order of usage. Otherwise, the returned list is in
+ * ascending order of usage.
+ *
+ * @param mostUsed true if most used, false if least used
+ * @return List of DatanodeUsageInfo
+ */
+ @Override
+ public List<DatanodeUsageInfo> getMostOrLeastUsedDatanodes(
+ boolean mostUsed) {
+ return null;
+ }
+
+ /**
* Return the node stat of the specified datanode.
* @param datanodeDetails - datanode details.
* @return node stat if it is live/stale, null if it is decommissioned or
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java
index be4ab57..774d706 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java
@@ -24,6 +24,7 @@
import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat;
import org.apache.hadoop.hdds.scm.net.NetworkTopology;
import org.apache.hadoop.hdds.scm.node.DatanodeInfo;
+import org.apache.hadoop.hdds.scm.node.DatanodeUsageInfo;
import org.apache.hadoop.hdds.scm.node.NodeManager;
import org.apache.hadoop.hdds.scm.node.NodeStatus;
import org.apache.hadoop.hdds.scm.node.states.NodeNotFoundException;
@@ -211,6 +212,20 @@
return null;
}
+ /**
+ * Gets a sorted list of most or least used DatanodeUsageInfo containing
+ * healthy, in-service nodes. If the specified mostUsed is true, the returned
+ * list is in descending order of usage. Otherwise, the returned list is in
+ * ascending order of usage.
+ *
+ * @param mostUsed true if most used, false if least used
+ * @return List of DatanodeUsageInfo
+ */
+ @Override
+ public List<DatanodeUsageInfo> getMostOrLeastUsedDatanodes(boolean mostUsed) {
+ return null;
+ }
+
@Override
public SCMNodeMetric getNodeStat(DatanodeDetails datanodeDetails) {
return null;
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java
index 048b953..3aba29f 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java
@@ -21,6 +21,7 @@
import org.apache.hadoop.hdds.protocol.proto
.StorageContainerDatanodeProtocolProtos.PipelineReportsProto;
import org.apache.hadoop.hdds.scm.container.ContainerID;
+import org.apache.hadoop.hdds.scm.node.DatanodeUsageInfo;
import org.apache.hadoop.hdds.scm.node.NodeStatus;
import org.apache.hadoop.hdds.scm.net.NetworkTopology;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
@@ -161,6 +162,20 @@
}
/**
+ * Gets a sorted list of most or least used DatanodeUsageInfo containing
+ * healthy, in-service nodes. If the specified mostUsed is true, the returned
+ * list is in descending order of usage. Otherwise, the returned list is in
+ * ascending order of usage.
+ *
+ * @param mostUsed true if most used, false if least used
+ * @return List of DatanodeUsageInfo
+ */
+ @Override
+ public List<DatanodeUsageInfo> getMostOrLeastUsedDatanodes(boolean mostUsed) {
+ return null;
+ }
+
+ /**
* Return the node stat of the specified datanode.
*
* @param dd - datanode details.
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java
index 8657667..3f4c55c 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java
@@ -549,17 +549,32 @@
/**
* Get Datanode Usage information by ipaddress or uuid.
*
- * @param ipaddress - datanode ipaddress String
- * @param uuid - datanode uuid String
- * @return List of DatanodeUsageInfo. Each element contains info such as
+ * @param ipaddress datanode ipaddress String
+ * @param uuid datanode uuid String
+ * @return List of DatanodeUsageInfoProto. Each element contains info such as
* capacity, SCMused, and remaining space.
* @throws IOException
*/
@Override
- public List<HddsProtos.DatanodeUsageInfo> getDatanodeUsageInfo(
+ public List<HddsProtos.DatanodeUsageInfoProto> getDatanodeUsageInfo(
String ipaddress, String uuid) throws IOException {
return storageContainerLocationClient.getDatanodeUsageInfo(ipaddress,
uuid);
}
+ /**
+ * Get usage information of most or least used datanodes.
+ *
+ * @param mostUsed true if most used, false if least used
+ * @param count Integer number of nodes to get info for
+ * @return List of DatanodeUsageInfoProto. Each element contains info such as
+ * capacity, SCMUsed, and remaining space.
+ * @throws IOException
+ */
+ @Override
+ public List<HddsProtos.DatanodeUsageInfoProto> getDatanodeUsageInfo(
+ boolean mostUsed, int count) throws IOException {
+ return storageContainerLocationClient.getDatanodeUsageInfo(mostUsed, count);
+ }
+
}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java
index ff8d624..113ff98 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java
@@ -41,58 +41,69 @@
versionProvider = HddsVersionProvider.class)
public class UsageInfoSubcommand extends ScmSubcommand {
- @CommandLine.Option(names = {"--ip"}, paramLabel = "IP", description =
- "Show info by datanode ip address")
- private String ipaddress;
+ @CommandLine.ArgGroup(multiplicity = "1")
+ private ExclusiveArguments exclusiveArguments;
- @CommandLine.Option(names = {"--uuid"}, paramLabel = "UUID", description =
- "Show info by datanode UUID")
- private String uuid;
+ private static class ExclusiveArguments {
+ @CommandLine.Option(names = {"--ip"}, paramLabel = "IP", description =
+ "Show info by datanode ip address.", defaultValue = "")
+ private String ipaddress;
- public String getIpaddress() {
- return ipaddress;
+ @CommandLine.Option(names = {"--uuid"}, paramLabel = "UUID", description =
+ "Show info by datanode UUID.", defaultValue = "")
+ private String uuid;
+
+ @CommandLine.Option(names = {"-m", "--most-used"},
+ description = "Show the most used datanodes.",
+ defaultValue = "false")
+ private boolean mostUsed;
+
+ @CommandLine.Option(names = {"-l", "--least-used"},
+ description = "Show the least used datanodes.",
+ defaultValue = "false")
+ private boolean leastUsed;
}
- public void setIpaddress(String ipaddress) {
- this.ipaddress = ipaddress;
- }
+ @CommandLine.Option(names = {"-c", "--count"}, description = "Number of " +
+ "datanodes to display (Default: ${DEFAULT-VALUE}).",
+ paramLabel = "NUMBER OF NODES", defaultValue = "3")
+ private int count;
- public String getUuid() {
- return uuid;
- }
-
- public void setUuid(String uuid) {
- this.uuid = uuid;
- }
@Override
public void execute(ScmClient scmClient) throws IOException {
- if (Strings.isNullOrEmpty(ipaddress)) {
- ipaddress = "";
- }
- if (Strings.isNullOrEmpty(uuid)) {
- uuid = "";
- }
- if (Strings.isNullOrEmpty(ipaddress) && Strings.isNullOrEmpty(uuid)) {
- throw new IOException("ipaddress or uuid of the datanode must be " +
- "specified.");
+ List<HddsProtos.DatanodeUsageInfoProto> infoList;
+ if (count < 1) {
+ throw new IOException("Count must be an integer greater than 0.");
}
- List<HddsProtos.DatanodeUsageInfo> infoList =
- scmClient.getDatanodeUsageInfo(ipaddress, uuid);
-
- for (HddsProtos.DatanodeUsageInfo info : infoList) {
- printInfo(info);
+ // fetch info by ip or uuid
+ if (!Strings.isNullOrEmpty(exclusiveArguments.ipaddress) ||
+ !Strings.isNullOrEmpty(exclusiveArguments.uuid)) {
+ infoList = scmClient.getDatanodeUsageInfo(exclusiveArguments.ipaddress,
+ exclusiveArguments.uuid);
+ } else { // get info of most used or least used nodes
+ infoList = scmClient.getDatanodeUsageInfo(exclusiveArguments.mostUsed,
+ count);
}
+
+ infoList.forEach(this::printInfo);
}
- public void printInfo(HddsProtos.DatanodeUsageInfo info) {
- Double capacity = (double)info.getCapacity();
- Double usedRatio = info.getUsed() / capacity;
- Double remainingRatio = info.getRemaining() / capacity;
+ /**
+ * Print datanode usage information.
+ *
+ * @param info Information such as Capacity, SCMUsed etc.
+ */
+ public void printInfo(HddsProtos.DatanodeUsageInfoProto info) {
+ double capacity = (double) info.getCapacity();
+ double usedRatio = info.getUsed() / capacity;
+ double remainingRatio = info.getRemaining() / capacity;
NumberFormat percentFormat = NumberFormat.getPercentInstance();
percentFormat.setMinimumFractionDigits(5);
+ System.out.printf("Usage info for datanode with UUID %s:%n",
+ info.getNode().getUuid());
System.out.printf("%-10s: %20sB %n", "Capacity", info.getCapacity());
System.out.printf("%-10s: %20sB (%s) %n", "SCMUsed", info.getUsed(),
percentFormat.format(usedRatio));