[AMBARI-24066] Metrics Collector issues modify HTable descriptor calls every time it restarts. (#1503)

diff --git a/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/HBaseTimelineMetricsService.java b/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/HBaseTimelineMetricsService.java
index 2be9b01..8dfd651 100644
--- a/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/HBaseTimelineMetricsService.java
+++ b/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/HBaseTimelineMetricsService.java
@@ -310,7 +310,7 @@
         Function function = new Function(readFunction, null);
         conditionBuilder.topNFunction(function);
       } else {
-        LOG.info("Invalid Input for TopN query. Ignoring TopN Request.");
+        LOG.debug("Invalid Input for TopN query. Ignoring TopN Request.");
       }
     }
   }
diff --git a/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/PhoenixHBaseAccessor.java b/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/PhoenixHBaseAccessor.java
index 600114b..e3da8b2 100644
--- a/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/PhoenixHBaseAccessor.java
+++ b/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/PhoenixHBaseAccessor.java
@@ -604,7 +604,8 @@
     }
   }
 
-  void initPoliciesAndTTL() {
+  boolean initPoliciesAndTTL() {
+    boolean modifyAnyTable = false;
     Admin hBaseAdmin = null;
     try {
       hBaseAdmin = dataSource.getHBaseAdmin();
@@ -622,11 +623,11 @@
         tableNames = (TableName[]) ArrayUtils.addAll(tableNames, containerMetricsTableName);
       } catch (IOException e) {
         LOG.warn("Unable to get table names from HBaseAdmin for setting policies.", e);
-        return;
+        return false;
       }
       if (tableNames == null || tableNames.length == 0) {
         LOG.warn("Unable to get table names from HBaseAdmin for setting policies.");
-        return;
+        return false;
       }
       for (String tableName : PHOENIX_TABLES) {
         try {
@@ -657,12 +658,12 @@
           }
 
           //Set durability preferences
-          boolean durabilitySettingsModified = setDurabilityForTable(tableName, tableDescriptorBuilder);
+          boolean durabilitySettingsModified = setDurabilityForTable(tableName, tableDescriptorBuilder, tableDescriptor);
           modifyTable = modifyTable || durabilitySettingsModified;
 
           //Set compaction policy preferences
           boolean compactionPolicyModified = false;
-          compactionPolicyModified = setCompactionPolicyForTable(tableName, tableDescriptorBuilder);
+          compactionPolicyModified = setCompactionPolicyForTable(tableName, tableDescriptorBuilder, tableDescriptor);
           modifyTable = modifyTable || compactionPolicyModified;
 
           // Change TTL setting to match user configuration
@@ -680,13 +681,14 @@
                   tableTTL.get(tableName) + " seconds.");
 
                 hBaseAdmin.modifyColumnFamily(tableNameOptional.get(), familyDescriptorBuilder.build());
-                // modifyTable = true;
+                modifyTable = true;
               }
             }
           }
 
           // Persist only if anything changed
           if (modifyTable) {
+            modifyAnyTable = modifyTable;
             hBaseAdmin.modifyTable(tableNameOptional.get(), tableDescriptorBuilder.build());
           }
 
@@ -700,21 +702,27 @@
         LOG.warn("Exception on HBaseAdmin close.", e);
       }
     }
+    return modifyAnyTable;
   }
 
-  private boolean setDurabilityForTable(String tableName, TableDescriptorBuilder tableDescriptor) {
+  private boolean setDurabilityForTable(String tableName, TableDescriptorBuilder tableDescriptorBuilder, TableDescriptor tableDescriptor) {
     String tableDurability = metricsConf.get("timeline.metrics." + tableName + ".durability", "");
+
+    if (StringUtils.isEmpty(tableDurability) || tableDescriptor.getDurability().toString().equals(tableDurability)) {
+      return false;
+    }
+
     if (StringUtils.isNotEmpty(tableDurability)) {
       LOG.info("Setting WAL option " + tableDurability + " for table : " + tableName);
       boolean validDurability = true;
       if ("SKIP_WAL".equals(tableDurability)) {
-        tableDescriptor.setDurability(Durability.SKIP_WAL);
+        tableDescriptorBuilder.setDurability(Durability.SKIP_WAL);
       } else if ("SYNC_WAL".equals(tableDurability)) {
-        tableDescriptor.setDurability(Durability.SYNC_WAL);
+        tableDescriptorBuilder.setDurability(Durability.SYNC_WAL);
       } else if ("ASYNC_WAL".equals(tableDurability)) {
-        tableDescriptor.setDurability(Durability.ASYNC_WAL);
+        tableDescriptorBuilder.setDurability(Durability.ASYNC_WAL);
       } else if ("FSYNC_WAL".equals(tableDurability)) {
-        tableDescriptor.setDurability(Durability.FSYNC_WAL);
+        tableDescriptorBuilder.setDurability(Durability.FSYNC_WAL);
       } else {
         LOG.info("Unknown value for durability : " + tableDurability);
         validDurability = false;
@@ -725,7 +733,9 @@
   }
 
 
-  private boolean setCompactionPolicyForTable(String tableName, TableDescriptorBuilder tableDescriptorBuilder) {
+  private boolean setCompactionPolicyForTable(String tableName,
+                                              TableDescriptorBuilder tableDescriptorBuilder,
+                                              TableDescriptor tableDescriptor) {
 
     boolean modifyTable = false;
 
@@ -743,13 +753,11 @@
       blockingStoreFiles = hbaseConf.getInt(storeFilesConfig, 1000);
     }
 
-    if (StringUtils.isEmpty(compactionPolicyKey) || StringUtils.isEmpty(compactionPolicyClass)) {
-      // Default blockingStoreFiles = 300
-      modifyTable = setHbaseBlockingStoreFiles(tableDescriptorBuilder, tableName, 300);
-    } else {
+    if (!compactionPolicyClass.equals(tableDescriptor.getValue(compactionPolicyKey))) {
       tableDescriptorBuilder.setValue(compactionPolicyKey, compactionPolicyClass);
-      setHbaseBlockingStoreFiles(tableDescriptorBuilder, tableName, blockingStoreFiles);
+      setHbaseBlockingStoreFiles(tableDescriptorBuilder, tableDescriptor, tableName, blockingStoreFiles);
       modifyTable = true;
+      LOG.info("Setting compaction policy for " + tableName + ", " + compactionPolicyKey + "=" + compactionPolicyClass);
     }
 
     if (!compactionPolicyKey.equals(HSTORE_ENGINE_CLASS)) {
@@ -762,11 +770,15 @@
     return modifyTable;
   }
 
-  private boolean setHbaseBlockingStoreFiles(TableDescriptorBuilder tableDescriptor, String tableName, int value) {
-    tableDescriptor.setValue(BLOCKING_STORE_FILES_KEY, String.valueOf(value));
-    LOG.info("Setting config property " + BLOCKING_STORE_FILES_KEY +
-      " = " + value + " for " + tableName);
-    return true;
+  private boolean setHbaseBlockingStoreFiles(TableDescriptorBuilder tableDescriptorBuilder,
+                                             TableDescriptor tableDescriptor, String tableName, int value) {
+    if (!String.valueOf(value).equals(tableDescriptor.getValue(BLOCKING_STORE_FILES_KEY))) {
+      tableDescriptorBuilder.setValue(BLOCKING_STORE_FILES_KEY, String.valueOf(value));
+      LOG.info("Setting config property " + BLOCKING_STORE_FILES_KEY +
+        " = " + value + " for " + tableName);
+      return true;
+    }
+    return false;
   }
 
 
@@ -775,7 +787,7 @@
                                                                List<byte[]> splitPoints) throws SQLException {
 
     String createTableWithSplitPointsSql = sql + getSplitPointsStr(splitPoints.size());
-    LOG.info(createTableWithSplitPointsSql);
+    LOG.debug(createTableWithSplitPointsSql);
     PreparedStatement statement = connection.prepareStatement(createTableWithSplitPointsSql);
     for (int i = 1; i <= splitPoints.size(); i++) {
       statement.setBytes(i, splitPoints.get(i - 1));
@@ -1043,6 +1055,11 @@
     String metricName = metadataManagerInstance.getMetricNameFromUuid(uuid);
     Collection<List<Function>> functionList = findMetricFunctions(metricFunctions, metricName);
 
+    if (CollectionUtils.isEmpty(functionList)) {
+      LOG.warn("No metric name or pattern in GET query matched the metric name from the metric store : " + metricName);
+      return;
+    }
+
     for (List<Function> functions : functionList) {
       // Apply aggregation function if present
       if ((functions != null && !functions.isEmpty())) {
diff --git a/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricConfiguration.java b/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricConfiguration.java
index 393d4a3..eb09895 100644
--- a/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricConfiguration.java
+++ b/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricConfiguration.java
@@ -258,6 +258,9 @@
   public static final String TIMELINE_METRICS_WHITELIST_ENABLED =
     "timeline.metrics.whitelisting.enabled";
 
+  public static final String TIMELINE_METRICS_BLACKLIST_FILE =
+        "timeline.metrics.blacklist.file";
+
   public static final String TIMELINE_METRICS_WHITELIST_FILE =
     "timeline.metrics.whitelist.file";
 
diff --git a/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricsFilter.java b/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricsFilter.java
index b2d5fd9..1364e71 100644
--- a/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricsFilter.java
+++ b/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricsFilter.java
@@ -18,6 +18,8 @@
 
 package org.apache.ambari.metrics.core.timeline;
 
+import static org.apache.ambari.metrics.core.timeline.TimelineMetricConfiguration.TIMELINE_METRICS_BLACKLIST_FILE;
+
 import java.io.BufferedReader;
 import java.io.FileInputStream;
 import java.io.IOException;
@@ -40,7 +42,11 @@
   private static Set<String> whitelistedMetrics;
   private static Set<Pattern> whitelistedMetricPatterns;
   private static Set<String> whitelistedApps;
+
+  private static Set<String> blacklistedMetrics;
+  private static Set<Pattern> blacklistedPatterns;
   private static Set<String> blacklistedApps;
+
   private static String patternPrefix = "._p_";
   private static Set<String> amshbaseWhitelist;
 
@@ -60,11 +66,25 @@
     whitelistedMetricPatterns = new HashSet<Pattern>();
     blacklistedApps = new HashSet<>();
     whitelistedApps = new HashSet<>();
+
+    blacklistedMetrics = new HashSet<>();
+    blacklistedPatterns = new HashSet<>();
+    blacklistedApps = new HashSet<>();
+
     amshbaseWhitelist = new HashSet<>();
 
     if (configuration.isWhitelistingEnabled()) {
       String whitelistFile = metricsConf.get(TimelineMetricConfiguration.TIMELINE_METRICS_WHITELIST_FILE, TimelineMetricConfiguration.TIMELINE_METRICS_WHITELIST_FILE_LOCATION_DEFAULT);
-      readMetricWhitelistFromFile(whitelistFile);
+      readMetricWhitelistFromFile(whitelistedMetrics, whitelistedMetricPatterns, whitelistFile);
+      LOG.info("Whitelisting " + whitelistedMetrics.size() + " metrics");
+      LOG.debug("Whitelisted metrics : " + Arrays.toString(whitelistedMetrics.toArray()));
+    }
+
+    String blacklistFile = metricsConf.get(TIMELINE_METRICS_BLACKLIST_FILE, "");
+    if (!StringUtils.isEmpty(blacklistFile)) {
+      readMetricWhitelistFromFile(blacklistedMetrics, blacklistedPatterns, blacklistFile);
+      LOG.info("Blacklisting " + blacklistedMetrics.size() + " metrics");
+      LOG.debug("Blacklisted metrics : " + Arrays.toString(blacklistedMetrics.toArray()));
     }
 
     String appsBlacklist = metricsConf.get(TimelineMetricConfiguration.TIMELINE_METRICS_APPS_BLACKLIST, "");
@@ -89,7 +109,7 @@
     }
   }
 
-  private static void readMetricWhitelistFromFile(String whitelistFile) {
+  private static void readMetricWhitelistFromFile(Set<String> metricList, Set<Pattern> patternList, String whitelistFile) {
 
     BufferedReader br = null;
     String strLine;
@@ -103,17 +123,15 @@
           continue;
         }
         if (strLine.startsWith(patternPrefix)) {
-          whitelistedMetricPatterns.add(Pattern.compile(strLine.substring(patternPrefix.length())));
+          patternList.add(Pattern.compile(strLine.substring(patternPrefix.length())));
         } else {
-          whitelistedMetrics.add(strLine);
+          metricList.add(strLine);
         }
       }
     } catch (IOException ioEx) {
-      LOG.error("Unable to parse metric whitelist file", ioEx);
+      LOG.error("Unable to parse metric file", ioEx);
     }
 
-    LOG.info("Whitelisting " + whitelistedMetrics.size() + " metrics");
-    LOG.debug("Whitelisted metrics : " + Arrays.toString(whitelistedMetrics.toArray()));
   }
 
   public static boolean acceptMetric(String metricName, String appId) {
@@ -132,6 +150,21 @@
       return false;
     }
 
+    //Metric Blacklisting
+    if (CollectionUtils.isNotEmpty(blacklistedMetrics) || CollectionUtils.isNotEmpty(blacklistedPatterns)) {
+      if (blacklistedMetrics.contains(metricName)) {
+        return false;
+      }
+
+      for (Pattern p : blacklistedPatterns) {
+        Matcher m = p.matcher(metricName);
+        if (m.find()) {
+          blacklistedMetrics.add(metricName);
+          return false;
+        }
+      }
+    }
+
     //Special Case appId = ams-hbase whitelisting.
     if ("ams-hbase".equals(appId) && CollectionUtils.isNotEmpty(amshbaseWhitelist)) {
       return amshbaseWhitelist.contains(metric.getMetricName());
diff --git a/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/aggregators/AbstractTimelineAggregator.java b/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/aggregators/AbstractTimelineAggregator.java
index f12a597..ffa4827 100644
--- a/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/aggregators/AbstractTimelineAggregator.java
+++ b/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/aggregators/AbstractTimelineAggregator.java
@@ -451,29 +451,7 @@
    * @return
    */
   protected String getDownsampledMetricSkipClause() {
-
-    //TODO Fix downsampling for UUID change.
     return StringUtils.EMPTY;
-
-//    if (CollectionUtils.isEmpty(this.downsampleMetricPatterns)) {
-//      return StringUtils.EMPTY;
-//    }
-//
-//    StringBuilder sb = new StringBuilder();
-//
-//    for (int i = 0; i < downsampleMetricPatterns.size(); i++) {
-//      sb.append(" METRIC_NAME");
-//      sb.append(" NOT");
-//      sb.append(" LIKE ");
-//      sb.append("'" + downsampleMetricPatterns.get(i) + "'");
-//
-//      if (i < downsampleMetricPatterns.size() - 1) {
-//        sb.append(" AND ");
-//      }
-//    }
-//
-//    sb.append(" AND ");
-//    return sb.toString();
   }
 
   /**
diff --git a/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/AbstractMiniHBaseClusterTest.java b/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/AbstractMiniHBaseClusterTest.java
index 26078cb..c544fd0 100644
--- a/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/AbstractMiniHBaseClusterTest.java
+++ b/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/AbstractMiniHBaseClusterTest.java
@@ -26,6 +26,7 @@
 import static org.assertj.core.api.Assertions.assertThat;
 
 import java.io.IOException;
+import java.lang.reflect.Field;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
@@ -98,6 +99,14 @@
   public void setUp() throws Exception {
     Logger.getLogger("org.apache.ambari.metrics.core.timeline").setLevel(Level.DEBUG);
     hdb = createTestableHBaseAccessor();
+
+    //Change default precision table ttl.
+    Field f = PhoenixHBaseAccessor.class.getDeclaredField("tableTTL");
+    f.setAccessible(true);
+    Map<String, Integer> precisionValues = (Map<String, Integer>) f.get(hdb);
+    precisionValues.put(METRICS_RECORD_TABLE_NAME, 2 * 86400);
+    f.set(hdb, precisionValues);
+
     // inits connection, starts mini cluster
     conn = getConnection(getUrl());
 
diff --git a/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/ITPhoenixHBaseAccessor.java b/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/ITPhoenixHBaseAccessor.java
index 20fbc58..dba5c39 100644
--- a/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/ITPhoenixHBaseAccessor.java
+++ b/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/ITPhoenixHBaseAccessor.java
@@ -18,6 +18,7 @@
 package org.apache.ambari.metrics.core.timeline;
 
 import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertFalse;
 import static junit.framework.Assert.assertTrue;
 import static org.apache.ambari.metrics.core.timeline.TimelineMetricConfiguration.DATE_TIERED_COMPACTION_POLICY;
 import static org.apache.ambari.metrics.core.timeline.TimelineMetricConfiguration.FIFO_COMPACTION_POLICY_CLASS;
@@ -362,16 +363,11 @@
   @Test
   public void testInitPoliciesAndTTL() throws Exception {
     Admin hBaseAdmin = hdb.getHBaseAdmin();
-    int precisionTtl = 2 * 86400;
+    int expectedPrecisionTtl = 2 * 86400;
+    int precisionTtl = 0;
 
-    Field f = PhoenixHBaseAccessor.class.getDeclaredField("tableTTL");
-    f.setAccessible(true);
-    Map<String, Integer> precisionValues = (Map<String, Integer>) f.get(hdb);
-    precisionValues.put(METRICS_RECORD_TABLE_NAME, precisionTtl);
-    f.set(hdb, precisionValues);
-
-    hdb.initPoliciesAndTTL();
-
+    boolean modifyTables = hdb.initPoliciesAndTTL();
+    Assert.assertTrue(modifyTables);
     // Verify expected policies are set
     boolean normalizerEnabled = false;
     String precisionTableCompactionPolicy = null;
@@ -423,10 +419,32 @@
     Assert.assertTrue("METRIC_RECORD_UUID Durability Set.", precisionTableDurabilitySet);
     Assert.assertTrue("METRIC_AGGREGATE_UUID Durability Set.", aggregateTableDurabilitySet);
     Assert.assertEquals("FIFO compaction policy is set for METRIC_RECORD_UUID.", FIFO_COMPACTION_POLICY_CLASS, precisionTableCompactionPolicy);
-    Assert.assertEquals("FIFO compaction policy is set for aggregate tables", DATE_TIERED_COMPACTION_POLICY, aggregateTableCompactionPolicy);
-    Assert.assertEquals("Precision TTL value as expected.", 86400, precisionTtl);
+    Assert.assertEquals("Date Tiered compaction policy is set for aggregate tables", DATE_TIERED_COMPACTION_POLICY, aggregateTableCompactionPolicy);
+    Assert.assertEquals("Precision TTL value as expected.", expectedPrecisionTtl, precisionTtl);
 
     hBaseAdmin.close();
+
+    //Try one more time. This time, modifyTable should be 'false'
+    assertFalse(hdb.initPoliciesAndTTL());
+
+    //Change precision table durability to ASYNC_WAL via metricsConf and verify the table is modified.
+    Field f = PhoenixHBaseAccessor.class.getDeclaredField("metricsConf");
+    f.setAccessible(true);
+    Configuration newMetricsConf = (Configuration) f.get(hdb);
+    newMetricsConf.set("timeline.metrics." + METRICS_RECORD_TABLE_NAME + ".durability", "ASYNC_WAL");
+    f.set(hdb, newMetricsConf);
+
+    boolean modifyTable = hdb.initPoliciesAndTTL();
+    Assert.assertTrue(modifyTable);
+    TableName[] tableNames = hBaseAdmin.listTableNames(PHOENIX_TABLES_REGEX_PATTERN, false);
+
+    Optional<TableName> tableNameOptional = Arrays.stream(tableNames)
+      .filter(t -> METRICS_RECORD_TABLE_NAME.equals(t.getNameAsString())).findFirst();
+    TableDescriptor tableDescriptor = hBaseAdmin.getTableDescriptor(tableNameOptional.get());
+    Assert.assertEquals(tableDescriptor.getDurability().toString(),"ASYNC_WAL");
+
+    //Try one more time. This time, modifyTable should be 'false'
+    assertFalse(hdb.initPoliciesAndTTL());
   }
 
   private Multimap<String, List<Function>> singletonValueFunctionMap(String metricName) {
diff --git a/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/TimelineMetricsFilterTest.java b/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/TimelineMetricsFilterTest.java
index 9a5ae47..70b47c7 100644
--- a/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/TimelineMetricsFilterTest.java
+++ b/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/TimelineMetricsFilterTest.java
@@ -84,6 +84,29 @@
   }
 
   @Test
+  public void testMetricBlacklisting() throws Exception {
+
+    Configuration metricsConf = new Configuration();
+    TimelineMetricConfiguration configuration = EasyMock.createNiceMock(TimelineMetricConfiguration.class);
+    expect(configuration.getMetricsConf()).andReturn(metricsConf).once();
+    replay(configuration);
+
+    URL fileUrl = ClassLoader.getSystemResource("test_data/metric_blacklist.dat");
+
+    metricsConf.set("timeline.metrics.blacklist.file", fileUrl.getPath());
+    TimelineMetricsFilter.initializeMetricFilter(configuration);
+
+    TimelineMetric timelineMetric = new TimelineMetric();
+
+    timelineMetric.setMetricName("cpu_system");
+    Assert.assertTrue(TimelineMetricsFilter.acceptMetric(timelineMetric));
+
+    timelineMetric.setMetricName("cpu_idle");
+    Assert.assertFalse(TimelineMetricsFilter.acceptMetric(timelineMetric));
+  }
+
+
+  @Test
   public void testTogether() throws Exception {
 
     Configuration metricsConf = new Configuration();
@@ -160,6 +183,8 @@
     metricsConf.set("timeline.metrics.apps.whitelist", "namenode,nimbus");
     metricsConf.set("timeline.metrics.apps.blacklist", "datanode,kafka_broker");
     metricsConf.set("timeline.metrics.whitelist.file", getTestWhitelistFilePath());
+    URL fileUrl2 = ClassLoader.getSystemResource("test_data/metric_blacklist.dat");
+    metricsConf.set("timeline.metrics.blacklist.file", fileUrl2.getPath());
     expect(configuration.getMetricsConf()).andReturn(metricsConf).once();
 
     Set<String> whitelist = new HashSet<>();
@@ -176,6 +201,16 @@
 
     TimelineMetric timelineMetric = new TimelineMetric();
 
+
+    //Test Metric Blacklisting
+    timelineMetric.setMetricName("cpu_idle");
+    timelineMetric.setAppId("namenode");
+    Assert.assertFalse(TimelineMetricsFilter.acceptMetric(timelineMetric));
+
+    timelineMetric.setMetricName("jvm.HeapMetrics.m1");
+    timelineMetric.setAppId("nimbus");
+    Assert.assertFalse(TimelineMetricsFilter.acceptMetric(timelineMetric));
+
     //Test App Whitelisting
     timelineMetric.setMetricName("metric.a.b.c");
     timelineMetric.setAppId("namenode");
diff --git a/ambari-metrics-timelineservice/src/test/resources/test_data/metric_blacklist.dat b/ambari-metrics-timelineservice/src/test/resources/test_data/metric_blacklist.dat
new file mode 100644
index 0000000..7a5bead
--- /dev/null
+++ b/ambari-metrics-timelineservice/src/test/resources/test_data/metric_blacklist.dat
@@ -0,0 +1,2 @@
+cpu_idle
+._p_jvm.HeapMetrics*
\ No newline at end of file