HDFS-17028. RBF: Optimize debug logs of class ConnectionPool and other related classes. (#5694). Contributed by farmmamba.

Reviewed-by: Inigo Goiri <inigoiri@apache.org>
Signed-off-by: Ayush Saxena <ayushsaxena@apache.org>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionContext.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionContext.java
index df026bb..035d46f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionContext.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionContext.java
@@ -150,8 +150,8 @@
       // this is an erroneous case, but we have to close the connection
       // anyway since there will be connection leak if we don't do so
       // the connection has been moved out of the pool
-      LOG.error("Active connection with {} handlers will be closed",
-          this.numThreads);
+      LOG.error("Active connection with {} handlers will be closed, ConnectionContext is {}",
+          this.numThreads, this);
     }
     this.closed = true;
     Object proxy = this.client.getProxy();
@@ -170,7 +170,10 @@
     Class<?> clazz = proxy.getClass();
 
     StringBuilder sb = new StringBuilder();
-    sb.append(clazz.getSimpleName())
+    sb.append("hashcode:")
+        .append(hashCode())
+        .append(" ")
+        .append(clazz.getSimpleName())
         .append("@")
         .append(addr)
         .append("x")
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionManager.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionManager.java
index 8d0b77b..8982a15 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionManager.java
@@ -482,7 +482,7 @@
                   pool.getMaxSize(), pool);
             }
           } catch (IOException e) {
-            LOG.error("Cannot create a new connection", e);
+            LOG.error("Cannot create a new connection for {} {}", pool, e);
           }
         } catch (InterruptedException e) {
           LOG.error("The connection creator was interrupted");
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionPool.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionPool.java
index 8fbca50..c13debf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionPool.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionPool.java
@@ -286,8 +286,8 @@
       }
       this.connections = tmpConnections;
     }
-    LOG.debug("Expected to remove {} connection and actually removed {} connections",
-        num, removed.size());
+    LOG.debug("Expected to remove {} connection and actually removed {} connections "
+        + "for connectionPool: {}", num, removed.size(), connectionPoolId);
     return removed;
   }
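
For readers less familiar with SLF4J's parameterized logging, below is a minimal, self-contained sketch of the pattern the patch leans on: the pool or connection object is passed as a '{}' argument so that its toString() identifies the exact connection in the message. FakeContext, FakeProxy, and the address used are illustrative stand-ins, not the Hadoop classes touched above, and the comment on throwable handling assumes the SLF4J 1.7 line that Hadoop ships with.

// Illustrative sketch only; class and field names are hypothetical.
import java.io.IOException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ConnectionLogSketch {

  private static final Logger LOG = LoggerFactory.getLogger(ConnectionLogSketch.class);

  /** Stand-in for a pooled connection; the real class wraps an RPC proxy. */
  static final class FakeContext {
    private final String addr = "nn1.example.com:8020"; // hypothetical NameNode address
    private final int numThreads = 2;

    @Override
    public String toString() {
      // Mirrors the shape of the patched toString(): prefix the hash code so that
      // individual connection objects can be told apart in the logs.
      return "hashcode:" + hashCode() + " FakeProxy@" + addr + "x" + numThreads;
    }
  }

  public static void main(String[] args) {
    FakeContext ctx = new FakeContext();

    // The object is only rendered to a String if the level is enabled, so adding it
    // to the message costs nothing when the logger is quiet.
    LOG.error("Active connection with {} handlers will be closed, ConnectionContext is {}",
        2, ctx);

    // With SLF4J 1.7.x, a trailing Throwable that is NOT matched by a '{}' placeholder
    // is logged with its full stack trace; a Throwable that does fill a placeholder is
    // rendered via toString() instead.
    IOException ioe = new IOException("connection refused");
    LOG.error("Cannot create a new connection for {}", ctx, ioe); // stack trace kept
  }
}

Passing the object itself rather than concatenating strings defers the toString() call until the message is actually formatted, which is why the DEBUG-level message in ConnectionPool stays cheap when debug logging is disabled.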