OAK-8580: Add distinct cloud data store loggers for stream activity

This change adds dedicated named loggers,
oak.datastore.upload.streams and oak.datastore.download.streams,
that record when the cloud data stores stream binary content to or
from cloud storage.  Enabling them makes it possible to identify
where upload or download streaming is happening through the
repository, which is useful when trying to replace such activity
with direct binary access in clients.
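
As a sketch of how the new loggers might be consumed, assuming the
deployment routes SLF4J through Logback (the logger names below come
from this change; the backend and appender wiring are assumptions):

    <!-- Enable the stream loggers added by this change at DEBUG.
         Each message carries an Exception, and Logback prints its
         stack trace by default, showing where the call came from. -->
    <logger name="oak.datastore.upload.streams" level="DEBUG"/>
    <logger name="oak.datastore.download.streams" level="DEBUG"/>

With these loggers at DEBUG, every streamed upload or download is
reported with its blob identifier and a stack trace identifying the
code path that triggered it.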


git-svn-id: https://svn.apache.org/repos/asf/jackrabbit/oak/trunk@1866061 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java b/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
index 14d3740..7079ed4 100644
--- a/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
+++ b/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
@@ -91,6 +91,8 @@
 public class AzureBlobStoreBackend extends AbstractSharedBackend {
 
     private static final Logger LOG = LoggerFactory.getLogger(AzureBlobStoreBackend.class);
+    private static final Logger LOG_STREAMS_DOWNLOAD = LoggerFactory.getLogger("oak.datastore.download.streams");
+    private static final Logger LOG_STREAMS_UPLOAD = LoggerFactory.getLogger("oak.datastore.upload.streams");
 
     private static final String META_DIR_NAME = "META";
     private static final String META_KEY_PREFIX = META_DIR_NAME + "/";
@@ -220,6 +222,10 @@
 
             InputStream is = blob.openInputStream();
             LOG.debug("Got input stream for blob. identifier={} duration={}", key, (System.currentTimeMillis() - start));
+            if (LOG_STREAMS_DOWNLOAD.isDebugEnabled()) {
+                // Log message, with exception so we can get a trace to see where the call came from
+                LOG_STREAMS_DOWNLOAD.debug("Binary downloaded from Azure Blob Storage - identifier={}", key, new Exception());
+            }
             return is;
         }
         catch (StorageException e) {
@@ -262,6 +268,10 @@
                 try {
                     blob.upload(in, len, null, options, null);
                     LOG.debug("Blob created. identifier={} length={} duration={} buffered={}", key, len, (System.currentTimeMillis() - start), useBufferedStream);
+                    if (LOG_STREAMS_UPLOAD.isDebugEnabled()) {
+                        // Log message, with exception so we can get a trace to see where the call came from
+                        LOG_STREAMS_UPLOAD.debug("Binary uploaded to Azure Blob Storage - identifier={}", key, new Exception());
+                    }
                 } finally {
                     in.close();
                 }
@@ -1203,11 +1213,12 @@
             if (isMeta) {
                 id = addMetaKeyPrefix(getIdentifier().toString());
             }
-            if (LOG.isDebugEnabled()) {
-                // Log message, with exception so we can get a trace to see where the call
-                // came from
-                LOG.debug("binary downloaded from Azure Blob Storage: " + getIdentifier(),
-                        new Exception());
+            else {
+                // Don't worry about stream logging for metadata records
+                if (LOG_STREAMS_DOWNLOAD.isDebugEnabled()) {
+                    // Log message, with exception so we can get a trace to see where the call came from
+                    LOG_STREAMS_DOWNLOAD.debug("Binary downloaded from Azure Blob Storage - identifier={} ", id, new Exception());
+                }
             }
             try {
                 return container.getBlockBlobReference(id).openInputStream();
diff --git a/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java b/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java
index 3789723..78c6258 100644
--- a/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java
+++ b/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java
@@ -106,6 +106,8 @@
      * Logger instance.
      */
     private static final Logger LOG = LoggerFactory.getLogger(S3Backend.class);
+    private static final Logger LOG_STREAMS_DOWNLOAD = LoggerFactory.getLogger("oak.datastore.download.streams");
+    private static final Logger LOG_STREAMS_UPLOAD = LoggerFactory.getLogger("oak.datastore.upload.streams");
 
     private static final String KEY_PREFIX = "dataStore_";
 
@@ -333,6 +335,10 @@
                     // start multipart parallel upload using amazon sdk
                     Upload up = tmx.upload(s3ReqDecorator.decorate(new PutObjectRequest(
                         bucket, key, file)));
+                    if (LOG_STREAMS_UPLOAD.isDebugEnabled()) {
+                        // Log message, with exception so we can get a trace to see where the call came from
+                        LOG_STREAMS_UPLOAD.debug("Binary uploaded to S3 - identifier={}", key, new Exception());
+                    }
                     // wait for upload to finish
                     up.waitForUploadResult();
                     LOG.debug("synchronous upload to identifier [{}] completed.", identifier);
@@ -393,8 +399,9 @@
             S3Object object = s3service.getObject(bucket, key);
             InputStream in = object.getObjectContent();
             LOG.debug("[{}] read took [{}]ms", identifier, (System.currentTimeMillis() - start));
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("binary downloaded from S3: " + identifier, new Exception());
+            if (LOG_STREAMS_DOWNLOAD.isDebugEnabled()) {
+                // Log message, with exception so we can get a trace to see where the call came from
+                LOG_STREAMS_DOWNLOAD.debug("Binary downloaded from S3 - identifier={}", key, new Exception());
             }
             return in;
         } catch (AmazonServiceException e) {
@@ -1156,10 +1163,12 @@
             if (isMeta) {
                 id = addMetaKeyPrefix(getIdentifier().toString());
             }
-            if (LOG.isDebugEnabled()) {
-                // Log message, with exception so we can get a trace to see where the call
-                // came from
-                LOG.debug("binary downloaded from S3: " + getIdentifier(), new Exception());
+            else {
+                // Don't worry about stream logging for metadata records
+                if (LOG_STREAMS_DOWNLOAD.isDebugEnabled()) {
+                    // Log message, with exception so we can get a trace to see where the call came from
+                    LOG_STREAMS_DOWNLOAD.debug("Binary downloaded from S3 - identifier={}", id, new Exception());
+                }
             }
             return s3service.getObject(bucket, id).getObjectContent();
         }