AMBARI-22717 : Remove Anomaly Detection code from branch-3.0-ams. (avijayan)
diff --git a/ambari-metrics-anomaly-detection-service/conf/unix/ambari-metrics-admanager b/ambari-metrics-anomaly-detection-service/conf/unix/ambari-metrics-admanager
deleted file mode 100644
index 98b7606..0000000
--- a/ambari-metrics-anomaly-detection-service/conf/unix/ambari-metrics-admanager
+++ /dev/null
@@ -1,227 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific
-
-PIDFILE=/var/run/ambari-metrics-anomaly-detection/ambari-metrics-admanager.pid
-OUTFILE=/var/log/ambari-metrics-anomaly-detection/ambari-metrics-admanager.out
-
-CONF_DIR=/etc/ambari-metrics-anomaly-detection/conf
-DAEMON_NAME=ams_admanager
-SPARK_HOME=/usr/lib/ambari-metrics-anomaly-detection/spark
-
-SPARK_MASTER_PID=/var/run/ambari-metrics-anomaly-detection/spark-ams-org.apache.spark.deploy.master.Master.pid
-
-STOP_TIMEOUT=10
-
-function spark_daemon
-{
-    local cmd=$1
-    local pid
-
-    if [[ "${cmd}" == "start" ]]
-      then
-
-        ${SPARK_HOME}/sbin/start-master.sh
-        sleep 2
-        master_pid=$(cat "$SPARK_MASTER_PID")
-        if [ -z "`ps ax | grep -w ${master_pid} | grep org.apache.spark.deploy.master.Master`" ]; then
-          echo "ERROR: Spark Master start failed. For more details, see outfile in log directory."
-          exit -1
-        fi
-
-        ${SPARK_HOME}/sbin/start-slave.sh spark://${SPARK_MASTER_HOST}:${SPARK_MASTER_PORT}
-    elif [[ "${cmd}" == "stop" ]]
-      then
-        ${SPARK_HOME}/sbin/stop-slave.sh
-        ${SPARK_HOME}/sbin/stop-master.sh
-    else
-        pid=${SPARK_MASTER_PID}
-        daemon_status "${pid}"
-    fi
-
-}
-
-function write_pidfile
-{
-    local pidfile="$1"
-    echo $! > "${pidfile}" 2>/dev/null
-    if [[ $? -gt 0 ]]; then
-      echo "ERROR:  Cannot write pid ${pidfile}." | tee -a $STARTUPFILE
-      exit 1;
-    fi
-}
-
-function java_setup
-{
-  # Bail if we did not detect it
-  if [[ -z "${JAVA_HOME}" ]]; then
-    echo "ERROR: JAVA_HOME is not set and could not be found."
-    exit 1
-  fi
-
-  if [[ ! -d "${JAVA_HOME}" ]]; then
-    echo "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
-    exit 1
-  fi
-
-  JAVA="${JAVA_HOME}/bin/java"
-
-  if [[ ! -x "$JAVA" ]]; then
-    echo "ERROR: $JAVA is not executable."
-    exit 1
-  fi
-}
-
-function daemon_status()
-{
-  local pidfile="$1"
-  shift
-
-  local pid
-
-  if [[ -f "${pidfile}" ]]; then
-    pid=$(cat "${pidfile}")
-    if ps -p "${pid}" > /dev/null 2>&1; then
-      return 0
-    fi
-    return 1
-  fi
-  return 3
-}
-
-function start()
-{
-  java_setup
-
-
-  if [[ "${AMS_AD_STANDALONE_SPARK_ENABLED}" == "true" || "${AMS_AD_STANDALONE_SPARK_ENABLED}" == "True" ]]
-  then
-    spark_daemon "start"
-  fi
-
-  daemon_status "${PIDFILE}"
-  if [[ $? == 0  ]]; then
-    echo "AMS AD Manager is running as process $(cat "${PIDFILE}"). Exiting" | tee -a $STARTUPFILE
-    exit 0
-  else
-    # stale pid file, so just remove it and continue on
-    rm -f "${PIDFILE}" >/dev/null 2>&1
-  fi
-
-  nohup "${JAVA}" "-Xms$AMS_AD_HEAPSIZE" "-Xmx$AMS_AD_HEAPSIZE" ${AMS_AD_OPTS} "-Dlog4j.configuration=file://$CONF_DIR/log4j.properties" "-jar" "/usr/lib/ambari-metrics-anomaly-detection/ambari-metrics-anomaly-detection-service.jar" "server" "${CONF_DIR}/config.yaml" "$@" > $OUTFILE 2>&1 &
-  PID=$!
-  write_pidfile "${PIDFILE}"
-  sleep 2
-
-  echo "Verifying ${DAEMON_NAME} process status..."
-  if [ -z "`ps ax -o pid | grep ${PID}`" ]; then
-    if [ -s ${OUTFILE} ]; then
-      echo "ERROR: ${DAEMON_NAME} start failed. For more details, see ${OUTFILE}:"
-      echo "===================="
-      tail -n 10 ${OUTFILE}
-      echo "===================="
-    else
-      echo "ERROR: ${DAEMON_NAME} start failed"
-      rm -f ${PIDFILE}
-    fi
-    echo "Anomaly Detection Manager out at: ${OUTFILE}"
-    exit -1
-  fi
-
-  rm -f $STARTUPFILE #Deleting startup file
-  echo "Anomaly Detection Manager successfully started."
-  }
-
-function stop()
-{
-  pidfile=${PIDFILE}
-
-  if [[ -f "${pidfile}" ]]; then
-    pid=$(cat "$pidfile")
-
-    kill "${pid}" >/dev/null 2>&1
-    sleep "${STOP_TIMEOUT}"
-
-    if kill -0 "${pid}" > /dev/null 2>&1; then
-      echo "WARNING: ${DAEMON_NAME} did not stop gracefully after ${STOP_TIMEOUT} seconds: Trying to kill with kill -9"
-      kill -9 "${pid}" >/dev/null 2>&1
-    fi
-
-    if ps -p "${pid}" > /dev/null 2>&1; then
-      echo "ERROR: Unable to kill ${pid}"
-    else
-      rm -f "${pidfile}" >/dev/null 2>&1
-    fi
-  fi
-
-  #Let's try to stop spark always since if the user has flipped the spark mode to 'yarn', the enabled flag becomes obsolete.
-  spark_daemon "stop"
-}
-
-# execute ams-admanager-env.sh
-if [[ -f "${CONF_DIR}/ams-admanager-env.sh" ]]; then
-  . "${CONF_DIR}/ams-admanager-env.sh"
-else
-  echo "ERROR: Cannot execute ${CONF_DIR}/ams-admanager-env.sh." 2>&1
-  exit 1
-fi
-
-if [[ -f "${CONF_DIR}/ams-admanager-spark-env.sh" ]]; then
-  . "${CONF_DIR}/ams-admanager-spark-env.sh"
-else
-  echo "ERROR: Cannot execute ${CONF_DIR}/ams-admanager-spark-env.sh." 2>&1
-  exit 1
-fi
-
-# set these env variables only if they were not set by ams-admanager-env.sh
-: ${AMS_AD_LOG_DIR:=/var/log/ambari-metrics-anomaly-detection}
-: ${AMS_AD_STANDALONE_SPARK_ENABLED:=true}
-
-# set pid dir path
-if [[ -n "${AMS_AD_PID_DIR}" ]]; then
-  PIDFILE=${AMS_AD_PID_DIR}/ambari-metrics-admanager.pid
-  SPARK_MASTER_PID=${AMS_AD_PID_DIR}/spark-${USER}-org.apache.spark.deploy.master.Master-1.pid
-fi
-
-# set out file path
-if [[ -n "${AMS_AD_LOG_DIR}" ]]; then
-  OUTFILE=${AMS_AD_LOG_DIR}/ambari-metrics-admanager.out
-fi
-
-#TODO manage 3 hbase daemons for start/stop/status
-case "$1" in
-
-	start)
-    start
-
-    ;;
-	stop)
-    stop
-
-    ;;
-	status)
-	    daemon_status "${PIDFILE}"
-	    if [[ $? == 0  ]]; then
-            echo "AMS AD Manager is running as process $(cat "${PIDFILE}")."
-        else
-            echo "AMS AD Manager is not running."
-        fi
-    ;;
-	restart)
-	  stop
-	  start
-	;;
-
-esac
diff --git a/ambari-metrics-anomaly-detection-service/conf/unix/config.yaml b/ambari-metrics-anomaly-detection-service/conf/unix/config.yaml
deleted file mode 100644
index 85e4004..0000000
--- a/ambari-metrics-anomaly-detection-service/conf/unix/config.yaml
+++ /dev/null
@@ -1,45 +0,0 @@
-#Licensed under the Apache License, Version 2.0 (the "License");
-#you may not use this file except in compliance with the License.
-#You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-#Unless required by applicable law or agreed to in writing, software
-#distributed under the License is distributed on an "AS IS" BASIS,
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#See the License for the specific language governing permissions and
-#limitations under the License.
-
-server:
-  applicationConnectors:
-   - type: http
-     port: 9999
-  requestLog:
-    type: external
-
-logging:
-  type: external
-
-metricDefinitionService:
-  inputDefinitionDirectory: /etc/ambari-metrics-anomaly-detection/conf/definitionDirectory
-
-metricsCollector:
-  hosts: host1,host2
-  port: 6188
-  protocol: http
-  metadataEndpoint: /ws/v1/timeline/metrics/metadata/key
-
-adQueryService:
-  anomalyDataTtl: 604800
-
-metricDefinitionDB:
-  # force checksum verification of all data that is read from the file system on behalf of a particular read
-  verifyChecksums: true
-  # raise an error as soon as it detects an internal corruption
-  performParanoidChecks: false
-  # Path to Level DB directory
-  dbDirPath: /tmp/ambari-metrics-anomaly-detection/db
-
-spark:
-  mode: standalone
-  masterHostPort: localhost:7077
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/conf/unix/log4j.properties b/ambari-metrics-anomaly-detection-service/conf/unix/log4j.properties
deleted file mode 100644
index 9dba1da..0000000
--- a/ambari-metrics-anomaly-detection-service/conf/unix/log4j.properties
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Define some default values that can be overridden by system properties
-# Root logger option
-log4j.rootLogger=INFO,file
-
-# Direct log messages to a log file
-log4j.appender.file=org.apache.log4j.RollingFileAppender
-log4j.appender.file.File=/var/log/ambari-metrics-anomaly-detection/ambari-metrics-admanager.log
-log4j.appender.file.MaxFileSize=80MB
-log4j.appender.file.MaxBackupIndex=60
-log4j.appender.file.layout=org.apache.log4j.PatternLayout
-log4j.appender.file.layout.ConversionPattern=%d{ABSOLUTE} %5p [%t] %c{1}:%L - %m%n
-
-
diff --git a/ambari-metrics-anomaly-detection-service/pom.xml b/ambari-metrics-anomaly-detection-service/pom.xml
deleted file mode 100644
index 50d7ef6..0000000
--- a/ambari-metrics-anomaly-detection-service/pom.xml
+++ /dev/null
@@ -1,528 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <parent>
-    <artifactId>ambari-metrics</artifactId>
-    <groupId>org.apache.ambari</groupId>
-    <version>2.0.0.0-SNAPSHOT</version>
-  </parent>
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>ambari-metrics-anomaly-detection-service</artifactId>
-  <version>2.0.0.0-SNAPSHOT</version>
-  <name>Ambari Metrics Anomaly Detection Service</name>
-  <packaging>jar</packaging>
-
-  <properties>
-    <scala.version>2.12.3</scala.version>
-    <scala.binary.version>2.11</scala.binary.version>
-    <jackson.version>2.9.1</jackson.version>
-    <dropwizard.version>1.2.0</dropwizard.version>
-    <spark.version>2.1.1</spark.version>
-    <hadoop.version>2.7.3.2.6.0.3-8</hadoop.version>
-    <hbase.version>1.1.2.2.6.0.3-8</hbase.version>
-    <phoenix.version>4.7.0.2.6.0.3-8</phoenix.version>
-  </properties>
-  
-  <repositories>
-    <repository>
-      <id>scala-tools.org</id>
-      <name>Scala-Tools Maven2 Repository</name>
-      <url>http://scala-tools.org/repo-releases</url>
-    </repository>
-  </repositories>
-
-  <pluginRepositories>
-    <pluginRepository>
-      <id>scala-tools.org</id>
-      <name>Scala-Tools Maven2 Repository</name>
-      <url>http://scala-tools.org/repo-releases</url>
-    </pluginRepository>
-  </pluginRepositories>
-
-  <build>
-    <finalName>${project.artifactId}-${project.version}</finalName>
-    <resources>
-      <resource>
-        <filtering>true</filtering>
-        <directory>src/main/resources</directory>
-        <includes>
-          <include>**/*.yml</include>
-          <include>**/*.xml</include>
-          <include>**/*.txt</include>
-        </includes>
-      </resource>
-    </resources>
-    <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <configuration>
-          <source>1.8</source>
-          <target>1.8</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>net.alchim31.maven</groupId>
-        <artifactId>scala-maven-plugin</artifactId>
-        <version>3.3.1</version>
-        <executions>
-          <execution>
-            <id>scala-compile-first</id>
-            <phase>process-resources</phase>
-            <goals>
-              <goal>add-source</goal>
-              <goal>compile</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>scala-test-compile</id>
-            <phase>process-test-resources</phase>
-            <goals>
-              <goal>testCompile</goal>
-            </goals>
-          </execution>
-        </executions>
-        <configuration>
-          <jvmArgs>
-            <jvmArg>-Xms512m</jvmArg>
-            <jvmArg>-Xmx2048m</jvmArg>
-          </jvmArgs>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.scalatest</groupId>
-        <artifactId>scalatest-maven-plugin</artifactId>
-        <version>1.0</version>
-      </plugin>
-      <plugin>
-        <groupId>org.scala-tools</groupId>
-        <artifactId>maven-scala-plugin</artifactId>
-        <executions>
-          <execution>
-            <goals>
-              <goal>compile</goal>
-              <goal>testCompile</goal>
-            </goals>
-          </execution>
-        </executions>
-        <configuration>
-          <scalaVersion>${scala.version}</scalaVersion>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <version>2.5</version>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-shade-plugin</artifactId>
-        <version>3.1.0</version>
-        <configuration>
-          <createDependencyReducedPom>false</createDependencyReducedPom>
-          <!--<minimizeJar>true</minimizeJar>-->
-          <filters>
-            <filter>
-              <artifact>*:*</artifact>
-              <excludes>
-                <exclude>META-INF/*.SF</exclude>
-                <exclude>META-INF/*.DSA</exclude>
-                <exclude>META-INF/*.RSA</exclude>
-              </excludes>
-            </filter>
-            <filter>
-              <artifact>org.apache.phoenix:phoenix-core</artifact>
-              <excludes>
-                <exclude>org/joda/time/**</exclude>
-                <exclude>com/codahale/metrics/**</exclude>
-                <exclude>com/google/common/collect/**</exclude>
-              </excludes>
-            </filter>
-            <filter>
-              <artifact>*:*</artifact>
-              <excludes>
-                <exclude>com/sun/jersey/**</exclude>
-              </excludes>
-            </filter>
-          </filters>
-        </configuration>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <configuration>
-              <transformers>
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
-                  <mainClass>
-                    org.apache.ambari.metrics.adservice.app.AnomalyDetectionApp
-                  </mainClass>
-                </transformer>
-              </transformers>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-antrun-plugin</artifactId>
-        <version>1.7</version>
-        <executions>
-          <execution>
-            <phase>generate-resources</phase>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <configuration>
-              <target name="Download Spark">
-                <mkdir dir="${project.build.directory}/embedded"/>
-                <get
-                        src="${spark.tar}"
-                        dest="${project.build.directory}/embedded/spark.tar.gz"
-                        usetimestamp="true"
-                />
-                <untar
-                        src="${project.build.directory}/embedded/spark.tar.gz"
-                        dest="${project.build.directory}/embedded"
-                        compression="gzip"
-                />
-                <move
-                        todir="${project.build.directory}/embedded/spark" >
-                        <fileset dir="${project.build.directory}/embedded/${spark.folder}" includes="**"/>
-                </move>
-              </target>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-
-  <dependencies>
-    <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
-      <version>2.5</version>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-      <version>1.7.2</version>
-    </dependency>
-    <dependency>
-      <groupId>com.github.lucarosellini.rJava</groupId>
-      <artifactId>JRI</artifactId>
-      <version>0.9-7</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.kafka</groupId>
-      <artifactId>kafka_2.10</artifactId>
-      <version>0.10.1.0</version>
-      <exclusions>
-        <exclusion>
-          <groupId>com.sun.jdmk</groupId>
-          <artifactId>jmxtools</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.sun.jmx</groupId>
-          <artifactId>jmxri</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>javax.mail</groupId>
-          <artifactId>mail</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>javax.jms</groupId>
-          <artifactId>jmx</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>javax.jms</groupId>
-          <artifactId>jms</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.kafka</groupId>
-      <artifactId>kafka-clients</artifactId>
-      <version>0.10.1.0</version>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
-      <version>${jackson.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.kafka</groupId>
-      <artifactId>connect-json</artifactId>
-      <version>0.10.1.0</version>
-      <exclusions>
-        <exclusion>
-          <artifactId>jackson-databind</artifactId>
-          <groupId>com.fasterxml.jackson.core</groupId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-streaming-kafka_2.10</artifactId>
-      <version>1.6.3</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-core</artifactId>
-      <version>${phoenix.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-annotations</artifactId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jersey-core</artifactId>
-          <groupId>com.sun.jersey</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jersey-server</artifactId>
-          <groupId>com.sun.jersey</groupId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpclient</artifactId>
-      <version>4.2.5</version>
-    </dependency>
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-library</artifactId>
-      <version>${scala.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.module</groupId>
-          <artifactId>jackson-module-scala_2.11</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-mllib_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.core</groupId>
-          <artifactId>jackson-databind</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <version>${hadoop.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>commons-el</groupId>
-          <artifactId>commons-el</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-runtime</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-compiler</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.mortbay.jetty</groupId>
-          <artifactId>jsp-2.1-jetty</artifactId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jersey-server</artifactId>
-          <groupId>com.sun.jersey</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jersey-core</artifactId>
-          <groupId>com.sun.jersey</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jersey-json</artifactId>
-          <groupId>com.sun.jersey</groupId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_2.12</artifactId>
-      <version>3.0.1</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>io.dropwizard</groupId>
-      <artifactId>dropwizard-core</artifactId>
-      <version>${dropwizard.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.glassfish.hk2.external</groupId>
-          <artifactId>javax.inject</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.glassfish.hk2.external</groupId>
-          <artifactId>aopalliance-repackaged</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>ch.qos.logback</groupId>
-          <artifactId>logback-classic</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>ch.qos.logback</groupId>
-          <artifactId>logback-access</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.slf4j</groupId>
-          <artifactId>log4j-over-slf4j</artifactId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jersey-server</artifactId>
-          <groupId>org.glassfish.jersey.core</groupId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <version>1.2.17</version>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-      <version>1.7.21</version>
-    </dependency>
-    <dependency>
-      <groupId>io.dropwizard</groupId>
-      <artifactId>dropwizard-testing</artifactId>
-      <version>${dropwizard.version}</version>
-      <scope>test</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.glassfish.hk2.external</groupId>
-          <artifactId>javax.inject</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>joda-time</groupId>
-      <artifactId>joda-time</artifactId>
-      <version>2.9.4</version>
-    </dependency>
-    <dependency>
-      <groupId>org.joda</groupId>
-      <artifactId>joda-convert</artifactId>
-      <version>1.8.1</version>
-    </dependency>
-    <dependency>
-      <groupId>com.google.inject</groupId>
-      <artifactId>guice</artifactId>
-      <version>4.1.0</version>
-    </dependency>
-    <dependency>
-      <groupId>com.google.inject.extensions</groupId>
-      <artifactId>guice-multibindings</artifactId>
-      <version>4.1.0</version>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.module</groupId>
-      <artifactId>jackson-module-scala_2.12</artifactId>
-      <version>${jackson.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.datatype</groupId>
-      <artifactId>jackson-datatype-jdk8</artifactId>
-      <version>${jackson.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.fusesource.leveldbjni</groupId>
-      <artifactId>leveldbjni-all</artifactId>
-      <version>1.8</version>
-    </dependency>
-    <dependency>
-      <groupId>org.iq80.leveldb</groupId>
-      <artifactId>leveldb</artifactId>
-      <version>0.9</version>
-    </dependency>
-    <!-- https://mvnrepository.com/artifact/org.scalaj/scalaj-http -->
-    <dependency>
-      <groupId>org.scalaj</groupId>
-      <artifactId>scalaj-http_2.12</artifactId>
-      <version>2.3.0</version>
-    </dependency>
-
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>4.12</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <version>18.0</version>
-    </dependency>
-    <dependency>
-      <groupId>io.dropwizard.metrics</groupId>
-      <artifactId>metrics-core</artifactId>
-      <version>3.2.5</version>
-    </dependency>
-    <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
-      <version>2.5</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
-      <version>1.8.4</version>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-</project>
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/assemblies/empty.xml b/ambari-metrics-anomaly-detection-service/src/main/assemblies/empty.xml
deleted file mode 100644
index 35738b1..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/assemblies/empty.xml
+++ /dev/null
@@ -1,21 +0,0 @@
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-  
-       http://www.apache.org/licenses/LICENSE-2.0
-  
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly>
-    <id>empty</id>
-    <formats/>
-</assembly>
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/common/DataSeries.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/common/DataSeries.java
deleted file mode 100644
index 54b402f..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/common/DataSeries.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.common;
-
-import java.util.Arrays;
-
-public class DataSeries {
-
-    public String seriesName;
-    public double[] ts;
-    public double[] values;
-
-    public DataSeries(String seriesName, double[] ts, double[] values) {
-        this.seriesName = seriesName;
-        this.ts = ts;
-        this.values = values;
-    }
-
-    @Override
-    public String toString() {
-        return seriesName + Arrays.toString(ts) + Arrays.toString(values);
-    }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/common/ResultSet.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/common/ResultSet.java
deleted file mode 100644
index dd3038f..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/common/ResultSet.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.common;
-
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class ResultSet {
-
-    public List<double[]> resultset = new ArrayList<>();
-
-    public ResultSet(List<double[]> resultset) {
-        this.resultset = resultset;
-    }
-
-    public void print() {
-        System.out.println("Result : ");
-        if (!resultset.isEmpty()) {
-            for (int i = 0; i<resultset.get(0).length;i++) {
-                for (double[] entity : resultset) {
-                    System.out.print(entity[i] + " ");
-                }
-                System.out.println();
-            }
-        }
-    }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/common/StatisticUtils.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/common/StatisticUtils.java
deleted file mode 100644
index 0a22e50..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/common/StatisticUtils.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.common;
-
-
-import java.util.Arrays;
-
-public class StatisticUtils {
-
-  public static double mean(double[] values) {
-    double sum = 0;
-    for (double d : values) {
-      sum += d;
-    }
-    return sum / values.length;
-  }
-
-  public static double variance(double[] values) {
-    double avg =  mean(values);
-    double variance = 0;
-    for (double d : values) {
-      variance += Math.pow(d - avg, 2.0);
-    }
-    return variance;
-  }
-
-  public static double sdev(double[]  values, boolean useBesselsCorrection) {
-    double variance = variance(values);
-    int n = (useBesselsCorrection) ? values.length - 1 : values.length;
-    return Math.sqrt(variance / n);
-  }
-
-  public static double median(double[] values) {
-    double[] clonedValues = Arrays.copyOf(values, values.length);
-    Arrays.sort(clonedValues);
-    int n = values.length;
-
-    if (n % 2 != 0) {
-      return clonedValues[(n-1)/2];
-    } else {
-      return ( clonedValues[(n-1)/2] + clonedValues[n/2] ) / 2;
-    }
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/AmbariServerInterface.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/AmbariServerInterface.java
deleted file mode 100644
index ac50c54..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/AmbariServerInterface.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.prototype.core;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Serializable;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.nio.charset.StandardCharsets;
-import java.util.Base64;
-
-public class AmbariServerInterface implements Serializable{
-
-  private static final Log LOG = LogFactory.getLog(AmbariServerInterface.class);
-
-  private String ambariServerHost;
-  private String clusterName;
-
-  public AmbariServerInterface(String ambariServerHost, String clusterName) {
-    this.ambariServerHost = ambariServerHost;
-    this.clusterName = clusterName;
-  }
-
-  public int getPointInTimeSensitivity() {
-
-    String url = constructUri("http", ambariServerHost, "8080", "/api/v1/clusters/" + clusterName + "/alert_definitions?fields=*");
-
-    URL obj = null;
-    BufferedReader in = null;
-
-    try {
-      obj = new URL(url);
-      HttpURLConnection con = (HttpURLConnection) obj.openConnection();
-      con.setRequestMethod("GET");
-
-      String encoded = Base64.getEncoder().encodeToString(("admin:admin").getBytes(StandardCharsets.UTF_8));
-      con.setRequestProperty("Authorization", "Basic "+encoded);
-
-      int responseCode = con.getResponseCode();
-      LOG.info("Sending 'GET' request to URL : " + url);
-      LOG.info("Response Code : " + responseCode);
-
-      in = new BufferedReader(
-        new InputStreamReader(con.getInputStream()));
-
-      StringBuilder responseJsonSb = new StringBuilder();
-      String line;
-      while ((line = in.readLine()) != null) {
-        responseJsonSb.append(line);
-      }
-
-//      JSONObject jsonObject = new JSONObject(responseJsonSb.toString());
-//      JSONArray array = jsonObject.getJSONArray("items");
-//      for(int i = 0 ; i < array.length() ; i++){
-//        JSONObject alertDefn = array.getJSONObject(i).getJSONObject("AlertDefinition");
-//        if (alertDefn.get("name") != null && alertDefn.get("name").equals("point_in_time_metrics_anomalies")) {
-//          JSONObject sourceNode = alertDefn.getJSONObject("source");
-//          JSONArray params = sourceNode.getJSONArray("parameters");
-//          for(int j = 0 ; j < params.length() ; j++){
-//            JSONObject param = params.getJSONObject(j);
-//            if (param.get("name").equals("sensitivity")) {
-//              return param.getInt("value");
-//            }
-//          }
-//          break;
-//        }
-//      }
-
-    } catch (Exception e) {
-      LOG.error(e);
-    } finally {
-      if (in != null) {
-        try {
-          in.close();
-        } catch (IOException e) {
-          LOG.warn(e);
-        }
-      }
-    }
-
-    return -1;
-  }
-
-  private String constructUri(String protocol, String host, String port, String path) {
-    StringBuilder sb = new StringBuilder(protocol);
-    sb.append("://");
-    sb.append(host);
-    sb.append(":");
-    sb.append(port);
-    sb.append(path);
-    return sb.toString();
-  }
-
-//  public static void main(String[] args) {
-//    AmbariServerInterface ambariServerInterface = new AmbariServerInterface();
-//    ambariServerInterface.getPointInTimeSensitivity("avijayan-ams-1.openstacklocal","c1");
-//  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/MetricKafkaProducer.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/MetricKafkaProducer.java
deleted file mode 100644
index 167fbb3..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/MetricKafkaProducer.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.core;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.clients.producer.ProducerConfig;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.clients.producer.RecordMetadata;
-
-import java.util.Properties;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-
-public class MetricKafkaProducer {
-
-    Producer producer;
-    private static String topicName = "ambari-metrics-topic";
-
-    public MetricKafkaProducer(String kafkaServers) {
-        Properties configProperties = new Properties();
-        configProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServers); //"avijayan-ams-2.openstacklocal:6667"
-        configProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,"org.apache.kafka.common.serialization.ByteArraySerializer");
-        configProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,"org.apache.kafka.connect.json.JsonSerializer");
-        producer = new KafkaProducer(configProperties);
-    }
-
-    public void sendMetrics(TimelineMetrics timelineMetrics) throws InterruptedException, ExecutionException {
-
-        ObjectMapper objectMapper = new ObjectMapper();
-        JsonNode jsonNode = objectMapper.valueToTree(timelineMetrics);
-        ProducerRecord<String, JsonNode> rec = new ProducerRecord<String, JsonNode>(topicName,jsonNode);
-        Future<RecordMetadata> kafkaFuture =  producer.send(rec);
-
-        System.out.println(kafkaFuture.isDone());
-        System.out.println(kafkaFuture.get().topic());
-    }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/MetricSparkConsumer.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/MetricSparkConsumer.java
deleted file mode 100644
index addeda7..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/MetricSparkConsumer.java
+++ /dev/null
@@ -1,244 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.core;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.ambari.metrics.adservice.prototype.methods.MetricAnomaly;
-import org.apache.ambari.metrics.adservice.prototype.methods.ema.EmaTechnique;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.Function;
-import org.apache.spark.broadcast.Broadcast;
-import org.apache.spark.streaming.Duration;
-import org.apache.spark.streaming.api.java.JavaDStream;
-import org.apache.spark.streaming.api.java.JavaPairDStream;
-import org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream;
-import org.apache.spark.streaming.api.java.JavaStreamingContext;
-import org.apache.spark.streaming.kafka.KafkaUtils;
-import scala.Tuple2;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class MetricSparkConsumer {
-
-  private static final Log LOG = LogFactory.getLog(MetricSparkConsumer.class);
-  private static String groupId = "ambari-metrics-group";
-  private static String topicName = "ambari-metrics-topic";
-  private static int numThreads = 1;
-  private static long pitStartTime = System.currentTimeMillis();
-  private static long ksStartTime = pitStartTime;
-  private static long hdevStartTime = ksStartTime;
-  private static Set<Pattern> includeMetricPatterns = new HashSet<>();
-  private static Set<String> includedHosts = new HashSet<>();
-  private static Set<TrendMetric> trendMetrics = new HashSet<>();
-
-  public MetricSparkConsumer() {
-  }
-
-  public static Properties readProperties(String propertiesFile) {
-    try {
-      Properties properties = new Properties();
-      InputStream inputStream = ClassLoader.getSystemResourceAsStream(propertiesFile);
-      if (inputStream == null) {
-        inputStream = new FileInputStream(propertiesFile);
-      }
-      properties.load(inputStream);
-      return properties;
-    } catch (IOException ioEx) {
-      LOG.error("Error reading properties file for jmeter");
-      return null;
-    }
-  }
-
-  public static void main(String[] args) throws InterruptedException {
-
-    if (args.length < 1) {
-      System.err.println("Usage: MetricSparkConsumer <input-config-file>");
-      System.exit(1);
-    }
-
-    Properties properties = readProperties(args[0]);
-
-    List<String> appIds = Arrays.asList(properties.getProperty("appIds").split(","));
-
-    String collectorHost = properties.getProperty("collectorHost");
-    String collectorPort = properties.getProperty("collectorPort");
-    String collectorProtocol = properties.getProperty("collectorProtocol");
-
-    String zkQuorum = properties.getProperty("zkQuorum");
-
-    double emaW = Double.parseDouble(properties.getProperty("emaW"));
-    double emaN = Double.parseDouble(properties.getProperty("emaN"));
-    int emaThreshold = Integer.parseInt(properties.getProperty("emaThreshold"));
-    double tukeysN = Double.parseDouble(properties.getProperty("tukeysN"));
-
-    long pitTestInterval = Long.parseLong(properties.getProperty("pointInTimeTestInterval"));
-    long pitTrainInterval = Long.parseLong(properties.getProperty("pointInTimeTrainInterval"));
-
-    long ksTestInterval = Long.parseLong(properties.getProperty("ksTestInterval"));
-    long ksTrainInterval = Long.parseLong(properties.getProperty("ksTrainInterval"));
-    int hsdevNhp = Integer.parseInt(properties.getProperty("hsdevNhp"));
-    long hsdevInterval = Long.parseLong(properties.getProperty("hsdevInterval"));
-
-    String ambariServerHost = properties.getProperty("ambariServerHost");
-    String clusterName = properties.getProperty("clusterName");
-
-    String includeMetricPatternStrings = properties.getProperty("includeMetricPatterns");
-    if (includeMetricPatternStrings != null && !includeMetricPatternStrings.isEmpty()) {
-      String[] patterns = includeMetricPatternStrings.split(",");
-      for (String p : patterns) {
-        LOG.info("Included Pattern : " + p);
-        includeMetricPatterns.add(Pattern.compile(p));
-      }
-    }
-
-    String includedHostList = properties.getProperty("hosts");
-    if (includedHostList != null && !includedHostList.isEmpty()) {
-      String[] hosts = includedHostList.split(",");
-      includedHosts.addAll(Arrays.asList(hosts));
-    }
-
-    MetricsCollectorInterface metricsCollectorInterface = new MetricsCollectorInterface(collectorHost, collectorProtocol, collectorPort);
-
-    SparkConf sparkConf = new SparkConf().setAppName("AmbariMetricsAnomalyDetector");
-
-    JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, new Duration(10000));
-
-    EmaTechnique emaTechnique = new EmaTechnique(emaW, emaN, emaThreshold);
-    PointInTimeADSystem pointInTimeADSystem = new PointInTimeADSystem(metricsCollectorInterface,
-      tukeysN,
-      pitTestInterval,
-      pitTrainInterval,
-      ambariServerHost,
-      clusterName);
-
-    TrendADSystem trendADSystem = new TrendADSystem(metricsCollectorInterface,
-      ksTestInterval,
-      ksTrainInterval,
-      hsdevNhp);
-
-    Broadcast<EmaTechnique> emaTechniqueBroadcast = jssc.sparkContext().broadcast(emaTechnique);
-    Broadcast<PointInTimeADSystem> pointInTimeADSystemBroadcast = jssc.sparkContext().broadcast(pointInTimeADSystem);
-    Broadcast<TrendADSystem> trendADSystemBroadcast = jssc.sparkContext().broadcast(trendADSystem);
-    Broadcast<MetricsCollectorInterface> metricsCollectorInterfaceBroadcast = jssc.sparkContext().broadcast(metricsCollectorInterface);
-    Broadcast<Set<Pattern>> includePatternBroadcast = jssc.sparkContext().broadcast(includeMetricPatterns);
-    Broadcast<Set<String>> includedHostBroadcast = jssc.sparkContext().broadcast(includedHosts);
-
-    JavaPairReceiverInputDStream<String, String> messages =
-      KafkaUtils.createStream(jssc, zkQuorum, groupId, Collections.singletonMap(topicName, numThreads));
-
-    //Convert JSON string to TimelineMetrics.
-    JavaDStream<TimelineMetrics> timelineMetricsStream = messages.map(new Function<Tuple2<String, String>, TimelineMetrics>() {
-      @Override
-      public TimelineMetrics call(Tuple2<String, String> message) throws Exception {
-        ObjectMapper mapper = new ObjectMapper();
-        TimelineMetrics metrics = mapper.readValue(message._2, TimelineMetrics.class);
-        return metrics;
-      }
-    });
-
-    timelineMetricsStream.print();
-
-    //Group TimelineMetric by AppId.
-    JavaPairDStream<String, TimelineMetrics> appMetricStream = timelineMetricsStream.mapToPair(
-      timelineMetrics -> timelineMetrics.getMetrics().isEmpty()  ?  new Tuple2<>("TEST", new TimelineMetrics()) : new Tuple2<String, TimelineMetrics>(timelineMetrics.getMetrics().get(0).getAppId(), timelineMetrics)
-    );
-
-    appMetricStream.print();
-
-    //Filter AppIds that are not needed.
-    JavaPairDStream<String, TimelineMetrics> filteredAppMetricStream = appMetricStream.filter(new Function<Tuple2<String, TimelineMetrics>, Boolean>() {
-      @Override
-      public Boolean call(Tuple2<String, TimelineMetrics> appMetricTuple) throws Exception {
-        return appIds.contains(appMetricTuple._1);
-      }
-    });
-
-    filteredAppMetricStream.print();
-
-    filteredAppMetricStream.foreachRDD(rdd -> {
-      rdd.foreach(
-        tuple2 -> {
-          long currentTime = System.currentTimeMillis();
-          EmaTechnique ema = emaTechniqueBroadcast.getValue();
-          if (currentTime > pitStartTime + pitTestInterval) {
-            LOG.info("Running Tukeys....");
-            pointInTimeADSystemBroadcast.getValue().runTukeysAndRefineEma(ema, currentTime);
-            pitStartTime = pitStartTime + pitTestInterval;
-          }
-
-          if (currentTime > ksStartTime + ksTestInterval) {
-            LOG.info("Running KS Test....");
-            trendADSystemBroadcast.getValue().runKSTest(currentTime, trendMetrics);
-            ksStartTime = ksStartTime + ksTestInterval;
-          }
-
-          if (currentTime > hdevStartTime + hsdevInterval) {
-            LOG.info("Running HSdev Test....");
-            trendADSystemBroadcast.getValue().runHsdevMethod();
-            hdevStartTime = hdevStartTime + hsdevInterval;
-          }
-
-          TimelineMetrics metrics = tuple2._2();
-          for (TimelineMetric timelineMetric : metrics.getMetrics()) {
-
-            boolean includeHost = includedHostBroadcast.getValue().contains(timelineMetric.getHostName());
-            boolean includeMetric = false;
-            if (includeHost) {
-              if (includePatternBroadcast.getValue().isEmpty()) {
-                includeMetric = true;
-              }
-              for (Pattern p : includePatternBroadcast.getValue()) {
-                Matcher m = p.matcher(timelineMetric.getMetricName());
-                if (m.find()) {
-                  includeMetric = true;
-                }
-              }
-            }
-
-            if (includeMetric) {
-              trendMetrics.add(new TrendMetric(timelineMetric.getMetricName(), timelineMetric.getAppId(),
-                timelineMetric.getHostName()));
-              List<MetricAnomaly> anomalies = ema.test(timelineMetric);
-              metricsCollectorInterfaceBroadcast.getValue().publish(anomalies);
-            }
-          }
-        });
-    });
-
-    jssc.start();
-    jssc.awaitTermination();
-  }
-}
-
-
-
-
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/MetricsCollectorInterface.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/MetricsCollectorInterface.java
deleted file mode 100644
index da3999a..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/MetricsCollectorInterface.java
+++ /dev/null
@@ -1,237 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.core;
-
-import org.apache.ambari.metrics.adservice.prototype.methods.MetricAnomaly;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.codehaus.jackson.map.AnnotationIntrospector;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectReader;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.Serializable;
-import java.net.HttpURLConnection;
-import java.net.InetAddress;
-import java.net.URL;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.TreeMap;
-
-public class MetricsCollectorInterface implements Serializable {
-
-  private static String hostName = null;
-  private String instanceId = null;
-  public final static String serviceName = "anomaly-engine";
-  private String collectorHost;
-  private String protocol;
-  private String port;
-  private static final String WS_V1_TIMELINE_METRICS = "/ws/v1/timeline/metrics";
-  private static final Log LOG = LogFactory.getLog(MetricsCollectorInterface.class);
-  private static ObjectMapper mapper;
-  private final static ObjectReader timelineObjectReader;
-
-  static {
-    mapper = new ObjectMapper();
-    AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
-    mapper.setAnnotationIntrospector(introspector);
-    mapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
-    timelineObjectReader = mapper.reader(TimelineMetrics.class);
-  }
-
-  public MetricsCollectorInterface(String collectorHost, String protocol, String port) {
-    this.collectorHost = collectorHost;
-    this.protocol = protocol;
-    this.port = port;
-    this.hostName = getDefaultLocalHostName();
-  }
-
-  public static String getDefaultLocalHostName() {
-
-    if (hostName != null) {
-      return hostName;
-    }
-
-    try {
-      return InetAddress.getLocalHost().getCanonicalHostName();
-    } catch (UnknownHostException e) {
-      LOG.info("Error getting host address");
-    }
-    return null;
-  }
-
-  public void publish(List<MetricAnomaly> metricAnomalies) {
-    if (CollectionUtils.isNotEmpty(metricAnomalies)) {
-      LOG.info("Sending metric anomalies of size : " + metricAnomalies.size());
-      List<TimelineMetric> metricList = getTimelineMetricList(metricAnomalies);
-      if (!metricList.isEmpty()) {
-        TimelineMetrics timelineMetrics = new TimelineMetrics();
-        timelineMetrics.setMetrics(metricList);
-        emitMetrics(timelineMetrics);
-      }
-    } else {
-      LOG.debug("No anomalies to send.");
-    }
-  }
-
-  private List<TimelineMetric> getTimelineMetricList(List<MetricAnomaly> metricAnomalies) {
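-    // Each anomaly becomes a single-point TimelineMetric: the appId encodes the detection method and the metadata carries the method name and anomaly score.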
-    List<TimelineMetric> metrics = new ArrayList<>();
-
-    if (metricAnomalies.isEmpty()) {
-      return metrics;
-    }
-
-    for (MetricAnomaly anomaly : metricAnomalies) {
-      TimelineMetric timelineMetric = new TimelineMetric();
-      timelineMetric.setMetricName(anomaly.getMetricKey());
-      timelineMetric.setAppId(serviceName + "-" + anomaly.getMethodType());
-      timelineMetric.setInstanceId(null);
-      timelineMetric.setHostName(getDefaultLocalHostName());
-      timelineMetric.setStartTime(anomaly.getTimestamp());
-      HashMap<String, String> metadata = new HashMap<>();
-      metadata.put("method", anomaly.getMethodType());
-      metadata.put("anomaly-score", String.valueOf(anomaly.getAnomalyScore()));
-      timelineMetric.setMetadata(metadata);
-      TreeMap<Long,Double> metricValues = new TreeMap<>();
-      metricValues.put(anomaly.getTimestamp(), anomaly.getMetricValue());
-      timelineMetric.setMetricValues(metricValues);
-
-      metrics.add(timelineMetric);
-    }
-    return metrics;
-  }
-
-  public boolean emitMetrics(TimelineMetrics metrics) {
-    String connectUrl = constructTimelineMetricUri();
-    String jsonData = null;
-    LOG.debug("EmitMetrics connectUrl = " + connectUrl);
-    try {
-      jsonData = mapper.writeValueAsString(metrics);
-      LOG.info(jsonData);
-    } catch (IOException e) {
-      LOG.error("Unable to parse metrics", e);
-    }
-    if (jsonData != null) {
-      return emitMetricsJson(connectUrl, jsonData);
-    }
-    return false;
-  }
-
-  private HttpURLConnection getConnection(String spec) throws IOException {
-    return (HttpURLConnection) new URL(spec).openConnection();
-  }
-
-  private boolean emitMetricsJson(String connectUrl, String jsonData) {
-    int timeout = 10000;
-    HttpURLConnection connection = null;
-    try {
-      if (connectUrl == null) {
-        throw new IOException("Unknown URL. Unable to connect to metrics collector.");
-      }
-      connection = getConnection(connectUrl);
-
-      connection.setRequestMethod("POST");
-      connection.setRequestProperty("Content-Type", "application/json");
-      connection.setRequestProperty("Connection", "Keep-Alive");
-      connection.setConnectTimeout(timeout);
-      connection.setReadTimeout(timeout);
-      connection.setDoOutput(true);
-
-      if (jsonData != null) {
-        try (OutputStream os = connection.getOutputStream()) {
-          os.write(jsonData.getBytes("UTF-8"));
-        }
-      }
-
-      int statusCode = connection.getResponseCode();
-
-      if (statusCode != 200) {
-        LOG.info("Unable to POST metrics to collector, " + connectUrl + ", " +
-          "statusCode = " + statusCode);
-      } else {
-        LOG.info("Metrics posted to Collector " + connectUrl);
-      }
-      return true;
-    } catch (IOException ioe) {
-      LOG.error(ioe.getMessage());
-    }
-    return false;
-  }
-
-  private String constructTimelineMetricUri() {
-    StringBuilder sb = new StringBuilder(protocol);
-    sb.append("://");
-    sb.append(collectorHost);
-    sb.append(":");
-    sb.append(port);
-    sb.append(WS_V1_TIMELINE_METRICS);
-    return sb.toString();
-  }
-
-  public TimelineMetrics fetchMetrics(String metricName,
-                                      String appId,
-                                      String hostname,
-                                      long startime,
-                                      long endtime) {
-
-    String url = constructTimelineMetricUri() + "?metricNames=" + metricName + "&appId=" + appId +
-      "&hostname=" + hostname + "&startTime=" + startime + "&endTime=" + endtime;
-    LOG.debug("Fetch metrics URL : " + url);
-
-    URL obj = null;
-    BufferedReader in = null;
-    TimelineMetrics timelineMetrics = new TimelineMetrics();
-
-    try {
-      obj = new URL(url);
-      HttpURLConnection con = (HttpURLConnection) obj.openConnection();
-      con.setRequestMethod("GET");
-      int responseCode = con.getResponseCode();
-      LOG.debug("Sending 'GET' request to URL : " + url);
-      LOG.debug("Response Code : " + responseCode);
-
-      in = new BufferedReader(
-        new InputStreamReader(con.getInputStream()));
-      timelineMetrics = timelineObjectReader.readValue(in);
-    } catch (Exception e) {
-      LOG.error(e);
-    } finally {
-      if (in != null) {
-        try {
-          in.close();
-        } catch (IOException e) {
-          LOG.warn(e);
-        }
-      }
-    }
-
-    LOG.info("Fetched " + timelineMetrics.getMetrics().size() + " metrics.");
-    return timelineMetrics;
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/PointInTimeADSystem.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/PointInTimeADSystem.java
deleted file mode 100644
index f379605..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/PointInTimeADSystem.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.core;
-
-import org.apache.ambari.metrics.adservice.prototype.common.DataSeries;
-import org.apache.ambari.metrics.adservice.prototype.common.ResultSet;
-import org.apache.ambari.metrics.adservice.prototype.methods.ema.EmaModel;
-import org.apache.ambari.metrics.adservice.prototype.methods.ema.EmaTechnique;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-public class PointInTimeADSystem implements Serializable {
-
-  //private EmaTechnique emaTechnique;
-  private MetricsCollectorInterface metricsCollectorInterface;
-  private Map<String, Double> tukeysNMap;
-  private double defaultTukeysN = 3;
-
-  private long testIntervalMillis = 5*60*1000; //5mins
-  private long trainIntervalMillis = 15*60*1000; //15mins
-
-  private static final Log LOG = LogFactory.getLog(PointInTimeADSystem.class);
-
-  private AmbariServerInterface ambariServerInterface;
-  private int sensitivity = 50;
-  private int minSensitivity = 0;
-  private int maxSensitivity = 100;
-
-  public PointInTimeADSystem(MetricsCollectorInterface metricsCollectorInterface, double defaultTukeysN,
-                             long testIntervalMillis, long trainIntervalMillis, String ambariServerHost, String clusterName) {
-    this.metricsCollectorInterface = metricsCollectorInterface;
-    this.defaultTukeysN = defaultTukeysN;
-    this.tukeysNMap = new HashMap<>();
-    this.testIntervalMillis = testIntervalMillis;
-    this.trainIntervalMillis = trainIntervalMillis;
-    this.ambariServerInterface = new AmbariServerInterface(ambariServerHost, clusterName);
-    LOG.info("Starting PointInTimeADSystem...");
-  }
-
-  public void runTukeysAndRefineEma(EmaTechnique emaTechnique, long startTime) {
-    LOG.info("Running Tukeys for test data interval [" + new Date(startTime - testIntervalMillis) + " : " + new Date(startTime) + "], with train data period [" + new Date(startTime  - testIntervalMillis - trainIntervalMillis) + " : " + new Date(startTime - testIntervalMillis) + "]");
-
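-    // Walk the model's sensitivity toward the value requested through the Ambari server, scaling Tukey's N by 5% per step.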
-    int requiredSensitivity = ambariServerInterface.getPointInTimeSensitivity();
-    if (requiredSensitivity == -1 || requiredSensitivity == sensitivity) {
-      LOG.info("No change in sensitivity needed.");
-    } else {
-      LOG.info("Current tukey's N value = " + defaultTukeysN);
-      if (requiredSensitivity > sensitivity) {
-        int targetSensitivity = Math.min(maxSensitivity, requiredSensitivity);
-        while (sensitivity < targetSensitivity) {
-          defaultTukeysN = defaultTukeysN + defaultTukeysN * 0.05;
-          sensitivity++;
-        }
-      } else {
-        int targetSensitivity = Math.max(minSensitivity, requiredSensitivity);
-        while (sensitivity > targetSensitivity) {
-          defaultTukeysN = defaultTukeysN - defaultTukeysN * 0.05;
-          sensitivity--;
-        }
-      }
-      LOG.info("New tukey's N value = " + defaultTukeysN);
-    }
-
-    TimelineMetrics timelineMetrics = new TimelineMetrics();
-    for (String metricKey : emaTechnique.getTrackedEmas().keySet()) {
-      LOG.info("EMA key = " + metricKey);
-      EmaModel emaModel = emaTechnique.getTrackedEmas().get(metricKey);
-      String metricName = emaModel.getMetricName();
-      String appId = emaModel.getAppId();
-      String hostname = emaModel.getHostname();
-
-      TimelineMetrics tukeysData = metricsCollectorInterface.fetchMetrics(metricName, appId, hostname, startTime - (testIntervalMillis + trainIntervalMillis),
-        startTime);
-
-      if (tukeysData.getMetrics().isEmpty()) {
-        LOG.info("No metrics fetched for Tukeys, metricKey = " + metricKey);
-        continue;
-      }
-
-      List<Double> trainTsList = new ArrayList<>();
-      List<Double> trainDataList = new ArrayList<>();
-      List<Double> testTsList = new ArrayList<>();
-      List<Double> testDataList = new ArrayList<>();
-
-      for (TimelineMetric metric : tukeysData.getMetrics()) {
-        for (Long timestamp : metric.getMetricValues().keySet()) {
-          if (timestamp <= (startTime - testIntervalMillis)) {
-            trainDataList.add(metric.getMetricValues().get(timestamp));
-            trainTsList.add((double)timestamp);
-          } else {
-            testDataList.add(metric.getMetricValues().get(timestamp));
-            testTsList.add((double)timestamp);
-          }
-        }
-      }
-
-      if (trainDataList.isEmpty() || testDataList.isEmpty() || trainDataList.size() < testDataList.size()) {
-        LOG.info("Not enough train/test data to perform analysis.");
-        continue;
-      }
-
-      String tukeysTrainSeries = "tukeysTrainSeries";
-      double[] trainTs = new double[trainTsList.size()];
-      double[] trainData = new double[trainTsList.size()];
-      for (int i = 0; i < trainTs.length; i++) {
-        trainTs[i] = trainTsList.get(i);
-        trainData[i] = trainDataList.get(i);
-      }
-
-      String tukeysTestSeries = "tukeysTestSeries";
-      double[] testTs = new double[testTsList.size()];
-      double[] testData = new double[testTsList.size()];
-      for (int i = 0; i < testTs.length; i++) {
-        testTs[i] = testTsList.get(i);
-        testData[i] = testDataList.get(i);
-      }
-
-      LOG.info("Train Size = " + trainTs.length + ", Test Size = " + testTs.length);
-
-      DataSeries tukeysTrainData = new DataSeries(tukeysTrainSeries, trainTs, trainData);
-      DataSeries tukeysTestData = new DataSeries(tukeysTestSeries, testTs, testData);
-
-      if (!tukeysNMap.containsKey(metricKey)) {
-        tukeysNMap.put(metricKey, defaultTukeysN);
-      }
-
-      Map<String, String> configs = new HashMap<>();
-      configs.put("tukeys.n", String.valueOf(tukeysNMap.get(metricKey)));
-
-      ResultSet rs = RFunctionInvoker.tukeys(tukeysTrainData, tukeysTestData, configs);
-
-      List<TimelineMetric> tukeysMetrics = getAsTimelineMetric(rs, metricName, appId, hostname);
-      LOG.info("Tukeys anomalies size : " + tukeysMetrics.size());
-      TreeMap<Long, Double> tukeysMetricValues = new TreeMap<>();
-
-      for (TimelineMetric tukeysMetric : tukeysMetrics) {
-        tukeysMetricValues.putAll(tukeysMetric.getMetricValues());
-        timelineMetrics.addOrMergeTimelineMetric(tukeysMetric);
-      }
-
-      TimelineMetrics emaData = metricsCollectorInterface.fetchMetrics(metricKey, MetricsCollectorInterface.serviceName+"-ema", MetricsCollectorInterface.getDefaultLocalHostName(), startTime - testIntervalMillis, startTime);
-      TreeMap<Long, Double> emaMetricValues = new TreeMap<>();
-      if (!emaData.getMetrics().isEmpty()) {
-        emaMetricValues = emaData.getMetrics().get(0).getMetricValues();
-      }
-
-      LOG.info("Ema anomalies size : " + emaMetricValues.size());
-      int tp = 0;
-      int tn = 0;
-      int fp = 0;
-      int fn = 0;
-
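-      // Score the EMA model against the Tukey's results, treating the latter as ground truth for each test timestamp.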
-      for (double ts : testTs) {
-        long timestamp = (long) ts;
-        if (tukeysMetricValues.containsKey(timestamp)) {
-          if (emaMetricValues.containsKey(timestamp)) {
-            tp++;
-          } else {
-            fn++;
-          }
-        } else {
-          if (emaMetricValues.containsKey(timestamp)) {
-            fp++;
-          } else {
-            tn++;
-          }
-        }
-      }
-
-      double recall = (double) tp / (double) (tp + fn);
-      double precision = (double) tp / (double) (tp + fp);
-      LOG.info("----------------------------");
-      LOG.info("Precision Recall values for " + metricKey);
-      LOG.info("tp=" + tp + ", fp=" + fp + ", tn=" + tn + ", fn=" + fn);
-      LOG.info("----------------------------");
-
-      if (recall < 0.5) {
-        LOG.info("Increasing EMA sensitivity by 10%");
-        emaModel.updateModel(true, 5);
-      } else if (precision < 0.5) {
-        LOG.info("Decreasing EMA sensitivity by 10%");
-        emaModel.updateModel(false, 5);
-      }
-
-    }
-
-    if (emaTechnique.getTrackedEmas().isEmpty()){
-      LOG.info("No EMA Technique keys tracked!!!!");
-    }
-
-    if (!timelineMetrics.getMetrics().isEmpty()) {
-      metricsCollectorInterface.emitMetrics(timelineMetrics);
-    }
-  }
-
-  private static List<TimelineMetric> getAsTimelineMetric(ResultSet result, String metricName, String appId, String hostname) {
-
-    List<TimelineMetric> timelineMetrics = new ArrayList<>();
-
-    if (result == null) {
-      LOG.info("ResultSet from R call is null!!");
-      return null;
-    }
-
-    if (result.resultset.size() > 0) {
-      double[] ts = result.resultset.get(0);
-      double[] metrics = result.resultset.get(1);
-      double[] anomalyScore = result.resultset.get(2);
-      for (int i = 0; i < ts.length; i++) {
-        TimelineMetric timelineMetric = new TimelineMetric();
-        timelineMetric.setMetricName(metricName + ":" + appId + ":" + hostname);
-        timelineMetric.setHostName(MetricsCollectorInterface.getDefaultLocalHostName());
-        timelineMetric.setAppId(MetricsCollectorInterface.serviceName + "-tukeys");
-        timelineMetric.setInstanceId(null);
-        timelineMetric.setStartTime((long) ts[i]);
-        TreeMap<Long, Double> metricValues = new TreeMap<>();
-        metricValues.put((long) ts[i], metrics[i]);
-
-        HashMap<String, String> metadata = new HashMap<>();
-        metadata.put("method", "tukeys");
-        if (Double.isInfinite(anomalyScore[i])) {
-          LOG.info("Got anomalyScore = infinity for " + metricName + ":" + appId + ":" + hostname);
-        } else {
-          metadata.put("anomaly-score", String.valueOf(anomalyScore[i]));
-        }
-        timelineMetric.setMetadata(metadata);
-
-        timelineMetric.setMetricValues(metricValues);
-        timelineMetrics.add(timelineMetric);
-      }
-    }
-
-    return timelineMetrics;
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/RFunctionInvoker.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/RFunctionInvoker.java
deleted file mode 100644
index 8f1eba6..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/RFunctionInvoker.java
+++ /dev/null
@@ -1,222 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.core;
-
-
-import org.apache.ambari.metrics.adservice.prototype.common.DataSeries;
-import org.apache.ambari.metrics.adservice.prototype.common.ResultSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.rosuda.JRI.REXP;
-import org.rosuda.JRI.RVector;
-import org.rosuda.JRI.Rengine;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-public class RFunctionInvoker {
-
-  static final Log LOG = LogFactory.getLog(RFunctionInvoker.class);
-  public static Rengine r = new Rengine(new String[]{"--no-save"}, false, null);
-  private static String rScriptDir = "/usr/lib/ambari-metrics-collector/R-scripts";
-
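-  // Pushes the train and test series into the shared R engine and builds data frames named train_data and test_data.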
-  private static void loadDataSets(Rengine r, DataSeries trainData, DataSeries testData) {
-    r.assign("train_ts", trainData.ts);
-    r.assign("train_x", trainData.values);
-    r.eval("train_data <- data.frame(train_ts,train_x)");
-    r.eval("names(train_data) <- c(\"TS\", " + trainData.seriesName + ")");
-
-    r.assign("test_ts", testData.ts);
-    r.assign("test_x", testData.values);
-    r.eval("test_data <- data.frame(test_ts,test_x)");
-    r.eval("names(test_data) <- c(\"TS\", " + testData.seriesName + ")");
-  }
-
-  public static void setScriptsDir(String dir) {
-    rScriptDir = dir;
-  }
-
-  public static ResultSet executeMethod(String methodType, DataSeries trainData, DataSeries testData, Map<String, String> configs) {
-
-    ResultSet result;
-    switch (methodType) {
-      case "tukeys":
-        result = tukeys(trainData, testData, configs);
-        break;
-      case "ema":
-        result = ema_global(trainData, testData, configs);
-        break;
-      case "ks":
-        result = ksTest(trainData, testData, configs);
-        break;
-      case "hsdev":
-        result = hsdev(trainData, testData, configs);
-        break;
-      default:
-        result = tukeys(trainData, testData, configs);
-        break;
-    }
-    return result;
-  }
-
-  public static ResultSet tukeys(DataSeries trainData, DataSeries testData, Map<String, String> configs) {
-    try {
-
-      r.eval("source('" + rScriptDir + "/tukeys.r" + "')");
-
-      double n = Double.parseDouble(configs.get("tukeys.n"));
-      r.eval("n <- " + n);
-
-      loadDataSets(r, trainData, testData);
-
-      r.eval("an <- ams_tukeys(train_data, test_data, n)");
-      REXP exp = r.eval("an");
-      RVector cont = (RVector) exp.getContent();
-      List<double[]> result = new ArrayList<>();
-      for (int i = 0; i < cont.size(); i++) {
-        result.add(cont.at(i).asDoubleArray());
-      }
-      return new ResultSet(result);
-    } catch (Exception e) {
-      LOG.error(e);
-    } finally {
-      r.end();
-    }
-    return null;
-  }
-
-  public static ResultSet ema_global(DataSeries trainData, DataSeries testData, Map<String, String> configs) {
-    try {
-      r.eval("source('" + rScriptDir + "/ema.r" + "')");
-
-      int n = Integer.parseInt(configs.get("ema.n"));
-      r.eval("n <- " + n);
-
-      double w = Double.parseDouble(configs.get("ema.w"));
-      r.eval("w <- " + w);
-
-      loadDataSets(r, trainData, testData);
-
-      r.eval("an <- ema_global(train_data, test_data, w, n)");
-      REXP exp = r.eval("an");
-      RVector cont = (RVector) exp.getContent();
-      List<double[]> result = new ArrayList<>();
-      for (int i = 0; i < cont.size(); i++) {
-        result.add(cont.at(i).asDoubleArray());
-      }
-      return new ResultSet(result);
-
-    } catch (Exception e) {
-      LOG.error(e);
-    } finally {
-      r.end();
-    }
-    return null;
-  }
-
-  public static ResultSet ema_daily(DataSeries trainData, DataSeries testData, Map<String, String> configs) {
-    try {
-      r.eval("source('" + rScriptDir + "/ema.r" + "')");
-
-      int n = Integer.parseInt(configs.get("ema.n"));
-      r.eval("n <- " + n);
-
-      double w = Double.parseDouble(configs.get("ema.w"));
-      r.eval("w <- " + w);
-
-      loadDataSets(r, trainData, testData);
-
-      r.eval("an <- ema_daily(train_data, test_data, w, n)");
-      REXP exp = r.eval("an");
-      RVector cont = (RVector) exp.getContent();
-      List<double[]> result = new ArrayList<>();
-      for (int i = 0; i < cont.size(); i++) {
-        result.add(cont.at(i).asDoubleArray());
-      }
-      return new ResultSet(result);
-
-    } catch (Exception e) {
-      LOG.error(e);
-    } finally {
-      r.end();
-    }
-    return null;
-  }
-
-  public static ResultSet ksTest(DataSeries trainData, DataSeries testData, Map<String, String> configs) {
-    try {
-      r.eval("source('" + rScriptDir + "/kstest.r" + "')");
-
-      double p_value = Double.parseDouble(configs.get("ks.p_value"));
-      r.eval("p_value <- " + p_value);
-
-      loadDataSets(r, trainData, testData);
-
-      r.eval("an <- ams_ks(train_data, test_data, p_value)");
-      REXP exp = r.eval("an");
-      RVector cont = (RVector) exp.getContent();
-      List<double[]> result = new ArrayList<>();
-      for (int i = 0; i < cont.size(); i++) {
-        result.add(cont.at(i).asDoubleArray());
-      }
-      return new ResultSet(result);
-
-    } catch (Exception e) {
-      LOG.error(e);
-    } finally {
-      r.end();
-    }
-    return null;
-  }
-
-  public static ResultSet hsdev(DataSeries trainData, DataSeries testData, Map<String, String> configs) {
-    try {
-      r.eval("source('" + rScriptDir + "/hsdev.r" + "')");
-
-      int n = Integer.parseInt(configs.get("hsdev.n"));
-      r.eval("n <- " + n);
-
-      int nhp = Integer.parseInt(configs.get("hsdev.nhp"));
-      r.eval("nhp <- " + nhp);
-
-      long interval = Long.parseLong(configs.get("hsdev.interval"));
-      r.eval("interval <- " + interval);
-
-      long period = Long.parseLong(configs.get("hsdev.period"));
-      r.eval("period <- " + period);
-
-      loadDataSets(r, trainData, testData);
-
-      r.eval("an2 <- hsdev_daily(train_data, test_data, n, nhp, interval, period)");
-      REXP exp = r.eval("an2");
-      RVector cont = (RVector) exp.getContent();
-
-      List<double[]> result = new ArrayList<>();
-      for (int i = 0; i < cont.size(); i++) {
-        result.add(cont.at(i).asDoubleArray());
-      }
-      return new ResultSet(result);
-    } catch (Exception e) {
-      LOG.error(e);
-    } finally {
-      r.end();
-    }
-    return null;
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/TrendADSystem.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/TrendADSystem.java
deleted file mode 100644
index 80212b3..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/TrendADSystem.java
+++ /dev/null
@@ -1,317 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.core;
-
-import org.apache.ambari.metrics.adservice.prototype.common.DataSeries;
-import org.apache.ambari.metrics.adservice.prototype.methods.MetricAnomaly;
-import org.apache.ambari.metrics.adservice.prototype.methods.hsdev.HsdevTechnique;
-import org.apache.ambari.metrics.adservice.prototype.methods.kstest.KSTechnique;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-
-import java.io.BufferedReader;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-
-public class TrendADSystem implements Serializable {
-
-  private MetricsCollectorInterface metricsCollectorInterface;
-  private List<TrendMetric> trendMetrics;
-
-  private long ksTestIntervalMillis = 10 * 60 * 1000;
-  private long ksTrainIntervalMillis = 10 * 60 * 1000;
-  private KSTechnique ksTechnique;
-
-  private HsdevTechnique hsdevTechnique;
-  private int hsdevNumHistoricalPeriods = 3;
-
-  private Map<KsSingleRunKey, MetricAnomaly> trackedKsAnomalies = new HashMap<>();
-  private static final Log LOG = LogFactory.getLog(TrendADSystem.class);
-  private String inputFile = "";
-
-  public TrendADSystem(MetricsCollectorInterface metricsCollectorInterface,
-                       long ksTestIntervalMillis,
-                       long ksTrainIntervalMillis,
-                       int hsdevNumHistoricalPeriods) {
-
-    this.metricsCollectorInterface = metricsCollectorInterface;
-    this.ksTestIntervalMillis = ksTestIntervalMillis;
-    this.ksTrainIntervalMillis = ksTrainIntervalMillis;
-    this.hsdevNumHistoricalPeriods = hsdevNumHistoricalPeriods;
-
-    this.ksTechnique = new KSTechnique();
-    this.hsdevTechnique = new HsdevTechnique();
-
-    trendMetrics = new ArrayList<>();
-  }
-
-  public void runKSTest(long currentEndTime, Set<TrendMetric> trendMetrics) {
-    readInputFile(inputFile);
-
-    long ksTestIntervalStartTime = currentEndTime - ksTestIntervalMillis;
-    LOG.info("Running KS Test for test data interval [" + new Date(ksTestIntervalStartTime) + " : " +
-      new Date(currentEndTime) + "], with train data period [" + new Date(ksTestIntervalStartTime - ksTrainIntervalMillis)
-      + " : " + new Date(ksTestIntervalStartTime) + "]");
-
-    for (TrendMetric metric : trendMetrics) {
-      String metricName = metric.metricName;
-      String appId = metric.appId;
-      String hostname = metric.hostname;
-      String key = metricName + ":" + appId + ":" + hostname;
-
-      TimelineMetrics ksData = metricsCollectorInterface.fetchMetrics(metricName, appId, hostname, ksTestIntervalStartTime - ksTrainIntervalMillis,
-        currentEndTime);
-
-      if (ksData.getMetrics().isEmpty()) {
-        LOG.info("No metrics fetched for KS, metricKey = " + key);
-        continue;
-      }
-
-      List<Double> trainTsList = new ArrayList<>();
-      List<Double> trainDataList = new ArrayList<>();
-      List<Double> testTsList = new ArrayList<>();
-      List<Double> testDataList = new ArrayList<>();
-
-      for (TimelineMetric timelineMetric : ksData.getMetrics()) {
-        for (Long timestamp : timelineMetric.getMetricValues().keySet()) {
-          if (timestamp <= ksTestIntervalStartTime) {
-            trainDataList.add(timelineMetric.getMetricValues().get(timestamp));
-            trainTsList.add((double) timestamp);
-          } else {
-            testDataList.add(timelineMetric.getMetricValues().get(timestamp));
-            testTsList.add((double) timestamp);
-          }
-        }
-      }
-
-      LOG.info("Train Data size : " + trainDataList.size() + ", Test Data Size : " + testDataList.size());
-      if (trainDataList.isEmpty() || testDataList.isEmpty() || trainDataList.size() < testDataList.size()) {
-        LOG.info("Not enough train/test data to perform KS analysis.");
-        continue;
-      }
-
-      String ksTrainSeries = "KSTrainSeries";
-      double[] trainTs = new double[trainTsList.size()];
-      double[] trainData = new double[trainTsList.size()];
-      for (int i = 0; i < trainTs.length; i++) {
-        trainTs[i] = trainTsList.get(i);
-        trainData[i] = trainDataList.get(i);
-      }
-
-      String ksTestSeries = "KSTestSeries";
-      double[] testTs = new double[testTsList.size()];
-      double[] testData = new double[testTsList.size()];
-      for (int i = 0; i < testTs.length; i++) {
-        testTs[i] = testTsList.get(i);
-        testData[i] = testDataList.get(i);
-      }
-
-      LOG.info("Train Size = " + trainTs.length + ", Test Size = " + testTs.length);
-
-      DataSeries ksTrainData = new DataSeries(ksTrainSeries, trainTs, trainData);
-      DataSeries ksTestData = new DataSeries(ksTestSeries, testTs, testData);
-
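-      // The KS test checks whether the distribution of values in the test window differs significantly from the train window.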
-      MetricAnomaly metricAnomaly = ksTechnique.runKsTest(key, ksTrainData, ksTestData);
-      if (metricAnomaly == null) {
-        LOG.info("No anomaly from KS test.");
-      } else {
-        LOG.info("Found Anomaly in KS Test. Publishing KS Anomaly metric....");
-        TimelineMetric timelineMetric = getAsTimelineMetric(metricAnomaly,
-          ksTestIntervalStartTime, currentEndTime, ksTestIntervalStartTime - ksTrainIntervalMillis, ksTestIntervalStartTime);
-        TimelineMetrics timelineMetrics = new TimelineMetrics();
-        timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
-        metricsCollectorInterface.emitMetrics(timelineMetrics);
-
-        trackedKsAnomalies.put(new KsSingleRunKey(ksTestIntervalStartTime, currentEndTime, metricName, appId, hostname), metricAnomaly);
-      }
-    }
-
-    if (trendMetrics.isEmpty()) {
-      LOG.info("No Trend metrics tracked!!!!");
-    }
-
-  }
-
-  private TimelineMetric getAsTimelineMetric(MetricAnomaly metricAnomaly,
-                                   long testStart,
-                                   long testEnd,
-                                   long trainStart,
-                                   long trainEnd) {
-
-    TimelineMetric timelineMetric = new TimelineMetric();
-    timelineMetric.setMetricName(metricAnomaly.getMetricKey());
-    timelineMetric.setAppId(MetricsCollectorInterface.serviceName + "-" + metricAnomaly.getMethodType());
-    timelineMetric.setInstanceId(null);
-    timelineMetric.setHostName(MetricsCollectorInterface.getDefaultLocalHostName());
-    timelineMetric.setStartTime(testEnd);
-    HashMap<String, String> metadata = new HashMap<>();
-    metadata.put("method", metricAnomaly.getMethodType());
-    metadata.put("anomaly-score", String.valueOf(metricAnomaly.getAnomalyScore()));
-    metadata.put("test-start-time", String.valueOf(testStart));
-    metadata.put("train-start-time", String.valueOf(trainStart));
-    metadata.put("train-end-time", String.valueOf(trainEnd));
-    timelineMetric.setMetadata(metadata);
-    TreeMap<Long,Double> metricValues = new TreeMap<>();
-    metricValues.put(testEnd, metricAnomaly.getMetricValue());
-    timelineMetric.setMetricValues(metricValues);
-    return timelineMetric;
-
-  }
-
-  public void runHsdevMethod() {
-
-    List<TimelineMetric> hsdevMetricAnomalies = new ArrayList<>();
-
-    for (KsSingleRunKey ksSingleRunKey : trackedKsAnomalies.keySet()) {
-
-      long hsdevTestEnd = ksSingleRunKey.endTime;
-      long hsdevTestStart = ksSingleRunKey.startTime;
-
-      long period = hsdevTestEnd - hsdevTestStart;
-
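-      // Train on hsdevNumHistoricalPeriods windows of the same length immediately preceding the KS test window.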
-      long hsdevTrainStart = hsdevTestStart - (hsdevNumHistoricalPeriods) * period;
-      long hsdevTrainEnd = hsdevTestStart;
-
-      LOG.info("Running HSdev Test for test data interval [" + new Date(hsdevTestStart) + " : " +
-        new Date(hsdevTestEnd) + "], with train data period [" + new Date(hsdevTrainStart)
-        + " : " + new Date(hsdevTrainEnd) + "]");
-
-      String metricName = ksSingleRunKey.metricName;
-      String appId = ksSingleRunKey.appId;
-      String hostname = ksSingleRunKey.hostname;
-      String key = metricName + ":" + appId + ":" + hostname;
-
-      TimelineMetrics hsdevData = metricsCollectorInterface.fetchMetrics(
-        metricName,
-        appId,
-        hostname,
-        hsdevTrainStart,
-        hsdevTestEnd);
-
-      if (hsdevData.getMetrics().isEmpty()) {
-        LOG.info("No metrics fetched for HSDev, metricKey = " + key);
-        continue;
-      }
-
-      List<Double> trainTsList = new ArrayList<>();
-      List<Double> trainDataList = new ArrayList<>();
-      List<Double> testTsList = new ArrayList<>();
-      List<Double> testDataList = new ArrayList<>();
-
-      for (TimelineMetric timelineMetric : hsdevData.getMetrics()) {
-        for (Long timestamp : timelineMetric.getMetricValues().keySet()) {
-          if (timestamp <= hsdevTestStart) {
-            trainDataList.add(timelineMetric.getMetricValues().get(timestamp));
-            trainTsList.add((double) timestamp);
-          } else {
-            testDataList.add(timelineMetric.getMetricValues().get(timestamp));
-            testTsList.add((double) timestamp);
-          }
-        }
-      }
-
-      if (trainDataList.isEmpty() || testDataList.isEmpty() || trainDataList.size() < testDataList.size()) {
-        LOG.info("Not enough train/test data to perform Hsdev analysis.");
-        continue;
-      }
-
-      String hsdevTrainSeries = "HsdevTrainSeries";
-      double[] trainTs = new double[trainTsList.size()];
-      double[] trainData = new double[trainTsList.size()];
-      for (int i = 0; i < trainTs.length; i++) {
-        trainTs[i] = trainTsList.get(i);
-        trainData[i] = trainDataList.get(i);
-      }
-
-      String hsdevTestSeries = "HsdevTestSeries";
-      double[] testTs = new double[testTsList.size()];
-      double[] testData = new double[testTsList.size()];
-      for (int i = 0; i < testTs.length; i++) {
-        testTs[i] = testTsList.get(i);
-        testData[i] = testDataList.get(i);
-      }
-
-      LOG.info("Train Size = " + trainTs.length + ", Test Size = " + testTs.length);
-
-      DataSeries hsdevTrainData = new DataSeries(hsdevTrainSeries, trainTs, trainData);
-      DataSeries hsdevTestData = new DataSeries(hsdevTestSeries, testTs, testData);
-
-      MetricAnomaly metricAnomaly = hsdevTechnique.runHsdevTest(key, hsdevTrainData, hsdevTestData);
-      if (metricAnomaly == null) {
-        LOG.info("No anomaly from Hsdev test. Mismatch between KS and HSDev. ");
-        ksTechnique.updateModel(key, false, 10);
-      } else {
-        LOG.info("Found Anomaly in Hsdev Test. This confirms KS anomaly.");
-        hsdevMetricAnomalies.add(getAsTimelineMetric(metricAnomaly,
-          hsdevTestStart, hsdevTestEnd, hsdevTrainStart, hsdevTrainEnd));
-      }
-    }
-    clearTrackedKsRunKeys();
-
-    if (!hsdevMetricAnomalies.isEmpty()) {
-      LOG.info("Publishing Hsdev Anomalies....");
-      TimelineMetrics timelineMetrics = new TimelineMetrics();
-      timelineMetrics.setMetrics(hsdevMetricAnomalies);
-      metricsCollectorInterface.emitMetrics(timelineMetrics);
-    }
-  }
-
-  private void clearTrackedKsRunKeys() {
-    trackedKsAnomalies.clear();
-  }
-
-  private void readInputFile(String fileName) {
-    trendMetrics.clear();
-    try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
-      for (String line; (line = br.readLine()) != null; ) {
-        String[] splits = line.split(",");
-        LOG.info("Adding a new metric to track in Trend AD system : " + splits[0]);
-        trendMetrics.add(new TrendMetric(splits[0], splits[1], splits[2]));
-      }
-    } catch (IOException e) {
-      LOG.error("Error reading input file : " + e);
-    }
-  }
-
-  class KsSingleRunKey implements Serializable{
-
-    long startTime;
-    long endTime;
-    String metricName;
-    String appId;
-    String hostname;
-
-    public KsSingleRunKey(long startTime, long endTime, String metricName, String appId, String hostname) {
-      this.startTime = startTime;
-      this.endTime = endTime;
-      this.metricName = metricName;
-      this.appId = appId;
-      this.hostname = hostname;
-    }
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/TrendMetric.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/TrendMetric.java
deleted file mode 100644
index d4db227..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/core/TrendMetric.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.core;
-
-import java.io.Serializable;
-
-public class TrendMetric implements Serializable {
-
-  String metricName;
-  String appId;
-  String hostname;
-
-  public TrendMetric(String metricName, String appId, String hostname) {
-    this.metricName = metricName;
-    this.appId = appId;
-    this.hostname = hostname;
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/AnomalyDetectionTechnique.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/AnomalyDetectionTechnique.java
deleted file mode 100644
index c19adda..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/AnomalyDetectionTechnique.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.methods;
-
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-
-import java.util.List;
-
-public abstract class AnomalyDetectionTechnique {
-
-  protected String methodType;
-
-  public abstract List<MetricAnomaly> test(TimelineMetric metric);
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/MetricAnomaly.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/MetricAnomaly.java
deleted file mode 100644
index 60ff11c..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/MetricAnomaly.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.methods;
-
-import java.io.Serializable;
-
-public class MetricAnomaly implements Serializable{
-
-  private String methodType;
-  private double anomalyScore;
-  private String metricKey;
-  private long timestamp;
-  private double metricValue;
-
-
-  public MetricAnomaly(String metricKey, long timestamp, double metricValue, String methodType, double anomalyScore) {
-    this.metricKey = metricKey;
-    this.timestamp = timestamp;
-    this.metricValue = metricValue;
-    this.methodType = methodType;
-    this.anomalyScore = anomalyScore;
-
-  }
-
-  public String getMethodType() {
-    return methodType;
-  }
-
-  public void setMethodType(String methodType) {
-    this.methodType = methodType;
-  }
-
-  public double getAnomalyScore() {
-    return anomalyScore;
-  }
-
-  public void setAnomalyScore(double anomalyScore) {
-    this.anomalyScore = anomalyScore;
-  }
-
-  public void setMetricKey(String metricKey) {
-    this.metricKey = metricKey;
-  }
-
-  public String getMetricKey() {
-    return metricKey;
-  }
-
-  public void setMetricName(String metricName) {
-    this.metricKey = metricName;
-  }
-
-  public long getTimestamp() {
-    return timestamp;
-  }
-
-  public void setTimestamp(long timestamp) {
-    this.timestamp = timestamp;
-  }
-
-  public double getMetricValue() {
-    return metricValue;
-  }
-
-  public void setMetricValue(double metricValue) {
-    this.metricValue = metricValue;
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/ema/EmaModel.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/ema/EmaModel.java
deleted file mode 100644
index 593028e..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/ema/EmaModel.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.methods.ema;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import javax.xml.bind.annotation.XmlRootElement;
-import java.io.Serializable;
-
-import static org.apache.ambari.metrics.adservice.prototype.methods.ema.EmaTechnique.suppressAnomaliesTheshold;
-
-@XmlRootElement
-public class EmaModel implements Serializable {
-
-  private String metricName;
-  private String hostname;
-  private String appId;
-  private double ema;
-  private double ems;
-  private double weight;
-  private double timessdev;
-
-  private int ctr = 0;
-
-  private static final Log LOG = LogFactory.getLog(EmaModel.class);
-
-  public EmaModel(String name, String hostname, String appId, double weight, double timessdev) {
-    this.metricName = name;
-    this.hostname = hostname;
-    this.appId = appId;
-    this.weight = weight;
-    this.timessdev = timessdev;
-    this.ema = 0.0;
-    this.ems = 0.0;
-  }
-
-  public String getMetricName() {
-    return metricName;
-  }
-
-  public String getHostname() {
-    return hostname;
-  }
-
-  public String getAppId() {
-    return appId;
-  }
-
-  public double testAndUpdate(double metricValue) {
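-    // Every sample updates the model, but anomaly testing is skipped until the warm-up counter exceeds suppressAnomaliesTheshold.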
-
-    double anomalyScore = 0.0;
-    LOG.info("Before Update ->" + metricName + ":" + appId + ":" + hostname + " - " + "ema = " + ema + ", ems = " + ems + ", timessdev = " + timessdev);
-    update(metricValue);
-    if (ctr > suppressAnomaliesTheshold) {
-      anomalyScore = test(metricValue);
-      if (anomalyScore > 0.0) {
-        LOG.info("Anomaly ->" + metricName + ":" + appId + ":" + hostname + " - " + "ema = " + ema + ", ems = " + ems +
-          ", timessdev = " + timessdev + ", metricValue = " + metricValue);
-      } else {
-        LOG.info("Not an Anomaly ->" + metricName + ":" + appId + ":" + hostname + " - " + "ema = " + ema + ", ems = " + ems +
-          ", timessdev = " + timessdev + ", metricValue = " + metricValue);
-      }
-    } else {
-      ctr++;
-      if (ctr > suppressAnomaliesTheshold) {
-        LOG.info("Ema Model for " + metricName + ":" + appId + ":" + hostname + " is ready for testing data.");
-      }
-    }
-    return anomalyScore;
-  }
-
-  public void update(double metricValue) {
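-    // Exponentially weighted moving average and moving std dev; 'weight' is the fraction carried over from the previous estimates.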
-    ema = weight * ema + (1 - weight) * metricValue;
-    ems = Math.sqrt(weight * Math.pow(ems, 2.0) + (1 - weight) * Math.pow(metricValue - ema, 2.0));
-    LOG.debug("In update : ema = " + ema + ", ems = " + ems);
-  }
-
-  public double test(double metricValue) {
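-    // Returns the Z-score as the anomaly score when the deviation from the EMA exceeds timessdev * ems, otherwise 0.0.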
-    LOG.debug("In test : ema = " + ema + ", ems = " + ems);
-    double diff = Math.abs(ema - metricValue) - (timessdev * ems);
-    LOG.debug("diff = " + diff);
-    if (diff > 0) {
-      return Math.abs((metricValue - ema) / ems); //Z score
-    } else {
-      return 0.0;
-    }
-  }
-
-  public void updateModel(boolean increaseSensitivity, double percent) {
-    LOG.info("Updating model for " + metricName + " with increaseSensitivity = " + increaseSensitivity + ", percent = " + percent);
-    double delta = percent / 100;
-    if (increaseSensitivity) {
-      delta = delta * -1;
-    }
-    this.timessdev = timessdev + delta * timessdev;
-    //this.weight = Math.min(1.0, weight + delta * weight);
-    LOG.info("New model parameters " + metricName + " : timessdev = " + timessdev + ", weight = " + weight);
-  }
-
-  public double getWeight() {
-    return weight;
-  }
-
-  public void setWeight(double weight) {
-    this.weight = weight;
-  }
-
-  public double getTimessdev() {
-    return timessdev;
-  }
-
-  public void setTimessdev(double timessdev) {
-    this.timessdev = timessdev;
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/ema/EmaModelLoader.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/ema/EmaModelLoader.java
deleted file mode 100644
index 7623f27..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/ema/EmaModelLoader.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.methods.ema;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.spark.SparkContext;
-import org.apache.spark.mllib.util.Loader;
-
-public class EmaModelLoader implements Loader<EmaTechnique> {
-    private static final Log LOG = LogFactory.getLog(EmaModelLoader.class);
-
-    @Override
-    public EmaTechnique load(SparkContext sc, String path) {
-        return new EmaTechnique(0.5,3);
-//        Gson gson = new Gson();
-//        try {
-//            String fileString = new String(Files.readAllBytes(Paths.get(path)), StandardCharsets.UTF_8);
-//            return gson.fromJson(fileString, EmaTechnique.class);
-//        } catch (IOException e) {
-//            LOG.error(e);
-//        }
-//        return null;
-    }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/ema/EmaTechnique.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/ema/EmaTechnique.java
deleted file mode 100644
index 7ec17d8..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/ema/EmaTechnique.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.methods.ema;
-
-import com.google.gson.Gson;
-import org.apache.ambari.metrics.adservice.prototype.methods.AnomalyDetectionTechnique;
-import org.apache.ambari.metrics.adservice.prototype.methods.MetricAnomaly;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.spark.SparkContext;
-import org.apache.spark.mllib.util.Saveable;
-
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-import java.io.BufferedWriter;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Serializable;
-import java.io.Writer;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-@XmlRootElement
-public class EmaTechnique extends AnomalyDetectionTechnique implements Serializable, Saveable {
-
-  @XmlElement(name = "trackedEmas")
-  private Map<String, EmaModel> trackedEmas;
-  private static final Log LOG = LogFactory.getLog(EmaTechnique.class);
-
-  private double startingWeight = 0.5;
-  private double startTimesSdev = 3.0;
-  private String methodType = "ema";
-  public static int suppressAnomaliesTheshold = 100;
-
-  public EmaTechnique(double startingWeight, double startTimesSdev, int suppressAnomaliesTheshold) {
-    trackedEmas = new HashMap<>();
-    this.startingWeight = startingWeight;
-    this.startTimesSdev = startTimesSdev;
-    EmaTechnique.suppressAnomaliesTheshold = suppressAnomaliesTheshold;
-    LOG.info("New EmaTechnique......");
-  }
-
-  public EmaTechnique(double startingWeight, double startTimesSdev) {
-    trackedEmas = new HashMap<>();
-    this.startingWeight = startingWeight;
-    this.startTimesSdev = startTimesSdev;
-    LOG.info("New EmaTechnique......");
-  }
-
-  public List<MetricAnomaly> test(TimelineMetric metric) {
-    String metricName = metric.getMetricName();
-    String appId = metric.getAppId();
-    String hostname = metric.getHostName();
-    String key = metricName + ":" + appId + ":" + hostname;
-
-    EmaModel emaModel = trackedEmas.get(key);
-    if (emaModel == null) {
-      LOG.debug("EmaModel not present for " + key);
-      LOG.debug("Number of tracked Emas : " + trackedEmas.size());
-      emaModel  = new EmaModel(metricName, hostname, appId, startingWeight, startTimesSdev);
-      trackedEmas.put(key, emaModel);
-    } else {
-      LOG.debug("EmaModel already present for " + key);
-    }
-
-    List<MetricAnomaly> anomalies = new ArrayList<>();
-
-    for (Long timestamp : metric.getMetricValues().keySet()) {
-      double metricValue = metric.getMetricValues().get(timestamp);
-      double anomalyScore = emaModel.testAndUpdate(metricValue);
-      if (anomalyScore > 0.0) {
-        LOG.info("Found anomaly for : " + key + ", anomalyScore = " + anomalyScore);
-        MetricAnomaly metricAnomaly = new MetricAnomaly(key, timestamp, metricValue, methodType, anomalyScore);
-        anomalies.add(metricAnomaly);
-      } else {
-        LOG.debug("Discarding non-anomaly for : " + key);
-      }
-    }
-    return anomalies;
-  }
-
-  public boolean updateModel(TimelineMetric timelineMetric, boolean increaseSensitivity, double percent) {
-    String metricName = timelineMetric.getMetricName();
-    String appId = timelineMetric.getAppId();
-    String hostname = timelineMetric.getHostName();
-    String key = metricName + ":" + appId + ":" + hostname;
-
-
-    EmaModel emaModel = trackedEmas.get(key);
-
-    if (emaModel == null) {
-      LOG.warn("EMA Model for " + key + " not found");
-      return false;
-    }
-    emaModel.updateModel(increaseSensitivity, percent);
-
-    return true;
-  }
-
-  @Override
-  public void save(SparkContext sc, String path) {
-    Gson gson = new Gson();
-    try {
-      String json = gson.toJson(this);
-      try (Writer writer = new BufferedWriter(new OutputStreamWriter(
-        new FileOutputStream(path), "utf-8"))) {
-        writer.write(json);
-      }
-    } catch (IOException e) {
-      LOG.error(e);
-    }
-  }
-
-  @Override
-  public String formatVersion() {
-    return "1.0";
-  }
-
-  public Map<String, EmaModel> getTrackedEmas() {
-    return trackedEmas;
-  }
-
-  public double getStartingWeight() {
-    return startingWeight;
-  }
-
-  public double getStartTimesSdev() {
-    return startTimesSdev;
-  }
-
-}
-
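For reference, the per-series bookkeeping that the removed EmaTechnique performs (one model per metricName:appId:hostname key, scored by EmaModel, which is not shown in this hunk) can be sketched in isolation. Everything below is illustrative: the class, field, and method names are not from the removed service, and the EMA/EMS update follows the recurrence documented in ema.R further down in this change.

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative sketch only: class, field and method names here are not part of the removed service.
    public class EmaSeriesSketch {

      private static class Series {
        double ema = 0.0;   // exponential moving average
        double ems = 0.0;   // exponential moving deviation
      }

      private final Map<String, Series> tracked = new HashMap<>();
      private final double w;   // smoothing weight, e.g. 0.9
      private final double n;   // deviation multiplier, e.g. 3.0

      public EmaSeriesSketch(double w, double n) {
        this.w = w;
        this.n = n;
      }

      // Returns a positive score when |x - EMA| exceeds n * EMS for this series, else 0.
      public double testAndUpdate(String metricName, String appId, String hostname, double x) {
        String key = metricName + ":" + appId + ":" + hostname;   // same key shape as EmaTechnique.test()
        Series s = tracked.computeIfAbsent(key, k -> new Series());
        double score = 0.0;
        if (s.ems > 0 && Math.abs(x - s.ema) > n * s.ems) {
          score = Math.abs(x - s.ema) / s.ems;
        }
        s.ema = w * s.ema + (1 - w) * x;                                          // EMA <- w*EMA + (1-w)*x
        s.ems = Math.sqrt(w * s.ems * s.ems + (1 - w) * Math.pow(x - s.ema, 2));  // EMS update as in ema.R
        return score;
      }
    }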
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/hsdev/HsdevTechnique.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/hsdev/HsdevTechnique.java
deleted file mode 100644
index 855cc70..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/hsdev/HsdevTechnique.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.methods.hsdev;
-
-import org.apache.ambari.metrics.adservice.prototype.common.DataSeries;
-import org.apache.ambari.metrics.adservice.prototype.methods.MetricAnomaly;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import java.io.Serializable;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.apache.ambari.metrics.adservice.prototype.common.StatisticUtils.median;
-import static org.apache.ambari.metrics.adservice.prototype.common.StatisticUtils.sdev;
-
-public class HsdevTechnique implements Serializable {
-
-  private Map<String, Double> hsdevMap;
-  private String methodType = "hsdev";
-  private static final Log LOG = LogFactory.getLog(HsdevTechnique.class);
-
-  public HsdevTechnique() {
-    hsdevMap = new HashMap<>();
-  }
-
-  public MetricAnomaly runHsdevTest(String key, DataSeries trainData, DataSeries testData) {
-    int testLength = testData.values.length;
-    int trainLength = trainData.values.length;
-
-    if (trainLength < testLength) {
-      LOG.info("Not enough train data.");
-      return null;
-    }
-
-    if (!hsdevMap.containsKey(key)) {
-      hsdevMap.put(key, 3.0);
-    }
-
-    double n = hsdevMap.get(key);
-
-    double historicSd = sdev(trainData.values, false);
-    double historicMedian = median(trainData.values);
-    double currentMedian = median(testData.values);
-
-
-    if (historicSd > 0) {
-      double diff = Math.abs(currentMedian - historicMedian);
-
-      if (diff > n * historicSd) {
-        LOG.info("Found anomaly for metric : " + key + " in the period ending " + new Date((long) testData.ts[testLength - 1]));
-        LOG.info("Current median = " + currentMedian + ", Historic Median = " + historicMedian + ", HistoricSd = " + historicSd);
-        double zScore = diff / historicSd;
-        LOG.info("Z Score of current series : " + zScore);
-        return new MetricAnomaly(key,
-          (long) testData.ts[testLength - 1],
-          testData.values[testLength - 1],
-          methodType,
-          zScore);
-      }
-    }
-
-    return null;
-  }
-
-}
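The removed HsdevTechnique flags a window when the median of the test series drifts from the historic median by more than n historic standard deviations. A minimal sketch of that check, assuming simple stand-ins for StatisticUtils.median and StatisticUtils.sdev (names below are illustrative, not from the removed code):

    import java.util.Arrays;

    // Illustrative sketch only: the class and the simple median/sdev helpers stand in for StatisticUtils.
    public final class HsdevSketch {

      // Returns the z-score when |median(test) - median(train)| > n * sdev(train), else 0.
      public static double hsdevScore(double[] train, double[] test, double n) {
        double historicSd = sdev(train);
        double historicMedian = median(train);
        double currentMedian = median(test);
        if (historicSd > 0) {
          double diff = Math.abs(currentMedian - historicMedian);
          if (diff > n * historicSd) {
            return diff / historicSd;
          }
        }
        return 0.0;
      }

      private static double median(double[] values) {
        double[] sorted = values.clone();
        Arrays.sort(sorted);
        int mid = sorted.length / 2;
        return sorted.length % 2 == 1 ? sorted[mid] : (sorted[mid - 1] + sorted[mid]) / 2.0;
      }

      private static double sdev(double[] values) {
        double mean = Arrays.stream(values).average().orElse(0.0);
        double var = Arrays.stream(values).map(v -> (v - mean) * (v - mean)).average().orElse(0.0);
        return Math.sqrt(var);
      }
    }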
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/kstest/KSTechnique.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/kstest/KSTechnique.java
deleted file mode 100644
index 0dc679e..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/methods/kstest/KSTechnique.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.prototype.methods.kstest;
-
-import org.apache.ambari.metrics.adservice.prototype.common.DataSeries;
-import org.apache.ambari.metrics.adservice.prototype.common.ResultSet;
-import org.apache.ambari.metrics.adservice.prototype.core.RFunctionInvoker;
-import org.apache.ambari.metrics.adservice.prototype.methods.MetricAnomaly;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import java.io.Serializable;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
-public class KSTechnique implements Serializable {
-
-  private String methodType = "ks";
-  private Map<String, Double> pValueMap;
-  private static final Log LOG = LogFactory.getLog(KSTechnique.class);
-
-  public KSTechnique() {
-    pValueMap = new HashMap<>();
-  }
-
-  public MetricAnomaly runKsTest(String key, DataSeries trainData, DataSeries testData) {
-
-    int testLength = testData.values.length;
-    int trainLength = trainData.values.length;
-
-    if (trainLength < testLength) {
-      LOG.info("Not enough train data.");
-      return null;
-    }
-
-    if (!pValueMap.containsKey(key)) {
-      pValueMap.put(key, 0.05);
-    }
-    double pValue = pValueMap.get(key);
-
-    ResultSet result = RFunctionInvoker.ksTest(trainData, testData, Collections.singletonMap("ks.p_value", String.valueOf(pValue)));
-    if (result == null) {
-      LOG.error("Resultset is null when invoking KS R function...");
-      return null;
-    }
-
-    if (result.resultset.size() > 0) {
-
-      LOG.info("Is size 1 ? result size = " + result.resultset.get(0).length);
-      LOG.info("p_value = " + result.resultset.get(3)[0]);
-      double dValue = result.resultset.get(2)[0];
-
-      return new MetricAnomaly(key,
-        (long) testData.ts[testLength - 1],
-        testData.values[testLength - 1],
-        methodType,
-        dValue);
-    }
-
-    return null;
-  }
-
-  public void updateModel(String metricKey, boolean increaseSensitivity, double percent) {
-
-    LOG.info("Updating KS model for " + metricKey + " with increaseSensitivity = " + increaseSensitivity + ", percent = " + percent);
-
-    if (!pValueMap.containsKey(metricKey)) {
-      LOG.error("Unknown metric key : " + metricKey);
-      LOG.info("pValueMap :" + pValueMap.toString());
-      return;
-    }
-
-    double delta = percent / 100;
-    if (!increaseSensitivity) {
-      delta = delta * -1;
-    }
-
-    double pValue = pValueMap.get(metricKey);
-    double newPValue = Math.min(1.0, pValue + delta * pValue);
-    pValueMap.put(metricKey, newPValue);
-    LOG.info("New pValue = " + newPValue);
-  }
-
-}
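KSTechnique.updateModel adjusts the per-metric p-value threshold by a relative percentage and caps it at 1.0. The same arithmetic in isolation, with illustrative class and method names and example values:

    // Illustrative sketch only: same arithmetic as KSTechnique.updateModel for a single p-value threshold.
    public class KsSensitivitySketch {

      public static double adjust(double pValue, boolean increaseSensitivity, double percent) {
        double delta = percent / 100.0;
        if (!increaseSensitivity) {
          delta = -delta;
        }
        return Math.min(1.0, pValue + delta * pValue);   // never allowed to exceed 1.0
      }

      public static void main(String[] args) {
        // Starting from the default 0.05: +20% sensitivity gives roughly 0.06, -20% gives roughly 0.04.
        System.out.println(adjust(0.05, true, 20));
        System.out.println(adjust(0.05, false, 20));
      }
    }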
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/MetricAnomalyDetectorTestInput.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/MetricAnomalyDetectorTestInput.java
deleted file mode 100644
index 9a002a1..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/MetricAnomalyDetectorTestInput.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.prototype.testing.utilities;
-
-import javax.xml.bind.annotation.XmlRootElement;
-import java.util.List;
-import java.util.Map;
-
-@XmlRootElement
-public class MetricAnomalyDetectorTestInput {
-
-  public MetricAnomalyDetectorTestInput() {
-  }
-
-  //Train data
-  private String trainDataName;
-  private String trainDataType;
-  private Map<String, String> trainDataConfigs;
-  private int trainDataSize;
-
-  //Test data
-  private String testDataName;
-  private String testDataType;
-  private Map<String, String> testDataConfigs;
-  private int testDataSize;
-
-  //Algorithm data
-  private List<String> methods;
-  private Map<String, String> methodConfigs;
-
-  public String getTrainDataName() {
-    return trainDataName;
-  }
-
-  public void setTrainDataName(String trainDataName) {
-    this.trainDataName = trainDataName;
-  }
-
-  public String getTrainDataType() {
-    return trainDataType;
-  }
-
-  public void setTrainDataType(String trainDataType) {
-    this.trainDataType = trainDataType;
-  }
-
-  public Map<String, String> getTrainDataConfigs() {
-    return trainDataConfigs;
-  }
-
-  public void setTrainDataConfigs(Map<String, String> trainDataConfigs) {
-    this.trainDataConfigs = trainDataConfigs;
-  }
-
-  public String getTestDataName() {
-    return testDataName;
-  }
-
-  public void setTestDataName(String testDataName) {
-    this.testDataName = testDataName;
-  }
-
-  public String getTestDataType() {
-    return testDataType;
-  }
-
-  public void setTestDataType(String testDataType) {
-    this.testDataType = testDataType;
-  }
-
-  public Map<String, String> getTestDataConfigs() {
-    return testDataConfigs;
-  }
-
-  public void setTestDataConfigs(Map<String, String> testDataConfigs) {
-    this.testDataConfigs = testDataConfigs;
-  }
-
-  public Map<String, String> getMethodConfigs() {
-    return methodConfigs;
-  }
-
-  public void setMethodConfigs(Map<String, String> methodConfigs) {
-    this.methodConfigs = methodConfigs;
-  }
-
-  public int getTrainDataSize() {
-    return trainDataSize;
-  }
-
-  public void setTrainDataSize(int trainDataSize) {
-    this.trainDataSize = trainDataSize;
-  }
-
-  public int getTestDataSize() {
-    return testDataSize;
-  }
-
-  public void setTestDataSize(int testDataSize) {
-    this.testDataSize = testDataSize;
-  }
-
-  public List<String> getMethods() {
-    return methods;
-  }
-
-  public void setMethods(List<String> methods) {
-    this.methods = methods;
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/MetricAnomalyTester.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/MetricAnomalyTester.java
deleted file mode 100644
index 10b3a71..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/MetricAnomalyTester.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.prototype.testing.utilities;
-
-/**
- * Class which was originally used to send test series from AMS to Spark through Kafka.
- */
-public class MetricAnomalyTester {
-
-//  public static String appId = MetricsCollectorInterface.serviceName;
-//  static final Log LOG = LogFactory.getLog(MetricAnomalyTester.class);
-//  static Map<String, TimelineMetric> timelineMetricMap = new HashMap<>();
-//
-//  public static TimelineMetrics runTestAnomalyRequest(MetricAnomalyDetectorTestInput input) throws UnknownHostException {
-//
-//    long currentTime = System.currentTimeMillis();
-//    TimelineMetrics timelineMetrics = new TimelineMetrics();
-//    String hostname = InetAddress.getLocalHost().getHostName();
-//
-//    //Train data
-//    TimelineMetric metric1 = new TimelineMetric();
-//    if (StringUtils.isNotEmpty(input.getTrainDataName())) {
-//      metric1 = timelineMetricMap.get(input.getTrainDataName());
-//      if (metric1 == null) {
-//        metric1 = new TimelineMetric();
-//        double[] trainSeries = MetricSeriesGeneratorFactory.generateSeries(input.getTrainDataType(), input.getTrainDataSize(), input.getTrainDataConfigs());
-//        metric1.setMetricName(input.getTrainDataName());
-//        metric1.setAppId(appId);
-//        metric1.setHostName(hostname);
-//        metric1.setStartTime(currentTime);
-//        metric1.setInstanceId(null);
-//        metric1.setMetricValues(getAsTimeSeries(currentTime, trainSeries));
-//        timelineMetricMap.put(input.getTrainDataName(), metric1);
-//      }
-//      timelineMetrics.getMetrics().add(metric1);
-//    } else {
-//      LOG.error("No train data name specified");
-//    }
-//
-//    //Test data
-//    TimelineMetric metric2 = new TimelineMetric();
-//    if (StringUtils.isNotEmpty(input.getTestDataName())) {
-//      metric2 = timelineMetricMap.get(input.getTestDataName());
-//      if (metric2 == null) {
-//        metric2 = new TimelineMetric();
-//        double[] testSeries = MetricSeriesGeneratorFactory.generateSeries(input.getTestDataType(), input.getTestDataSize(), input.getTestDataConfigs());
-//        metric2.setMetricName(input.getTestDataName());
-//        metric2.setAppId(appId);
-//        metric2.setHostName(hostname);
-//        metric2.setStartTime(currentTime);
-//        metric2.setInstanceId(null);
-//        metric2.setMetricValues(getAsTimeSeries(currentTime, testSeries));
-//        timelineMetricMap.put(input.getTestDataName(), metric2);
-//      }
-//      timelineMetrics.getMetrics().add(metric2);
-//    } else {
-//      LOG.warn("No test data name specified");
-//    }
-//
-//    //Invoke method
-//    if (CollectionUtils.isNotEmpty(input.getMethods())) {
-//      RFunctionInvoker.setScriptsDir("/etc/ambari-metrics-collector/conf/R-scripts");
-//      for (String methodType : input.getMethods()) {
-//        ResultSet result = RFunctionInvoker.executeMethod(methodType, getAsDataSeries(metric1), getAsDataSeries(metric2), input.getMethodConfigs());
-//        TimelineMetric timelineMetric = getAsTimelineMetric(result, methodType, input, currentTime, hostname);
-//        if (timelineMetric != null) {
-//          timelineMetrics.getMetrics().add(timelineMetric);
-//        }
-//      }
-//    } else {
-//      LOG.warn("No anomaly method requested");
-//    }
-//
-//    return timelineMetrics;
-//  }
-//
-//
-//  private static TimelineMetric getAsTimelineMetric(ResultSet result, String methodType, MetricAnomalyDetectorTestInput input, long currentTime, String hostname) {
-//
-//    if (result == null) {
-//      return null;
-//    }
-//
-//    TimelineMetric timelineMetric = new TimelineMetric();
-//    if (methodType.equals("tukeys") || methodType.equals("ema")) {
-//      timelineMetric.setMetricName(input.getTrainDataName() + "_" + input.getTestDataName() + "_" + methodType + "_" + currentTime);
-//      timelineMetric.setHostName(hostname);
-//      timelineMetric.setAppId(appId);
-//      timelineMetric.setInstanceId(null);
-//      timelineMetric.setStartTime(currentTime);
-//
-//      TreeMap<Long, Double> metricValues = new TreeMap<>();
-//      if (result.resultset.size() > 0) {
-//        double[] ts = result.resultset.get(0);
-//        double[] metrics = result.resultset.get(1);
-//        for (int i = 0; i < ts.length; i++) {
-//          if (i == 0) {
-//            timelineMetric.setStartTime((long) ts[i]);
-//          }
-//          metricValues.put((long) ts[i], metrics[i]);
-//        }
-//      }
-//      timelineMetric.setMetricValues(metricValues);
-//      return timelineMetric;
-//    }
-//    return null;
-//  }
-//
-//
-//  private static TreeMap<Long, Double> getAsTimeSeries(long currentTime, double[] values) {
-//
-//    long startTime = currentTime - (values.length - 1) * 60 * 1000;
-//    TreeMap<Long, Double> metricValues = new TreeMap<>();
-//
-//    for (int i = 0; i < values.length; i++) {
-//      metricValues.put(startTime, values[i]);
-//      startTime += (60 * 1000);
-//    }
-//    return metricValues;
-//  }
-//
-//  private static DataSeries getAsDataSeries(TimelineMetric timelineMetric) {
-//
-//    TreeMap<Long, Double> metricValues = timelineMetric.getMetricValues();
-//    double[] timestamps = new double[metricValues.size()];
-//    double[] values = new double[metricValues.size()];
-//    int i = 0;
-//
-//    for (Long timestamp : metricValues.keySet()) {
-//      timestamps[i] = timestamp;
-//      values[i++] = metricValues.get(timestamp);
-//    }
-//    return new DataSeries(timelineMetric.getMetricName() + "_" + timelineMetric.getAppId() + "_" + timelineMetric.getHostName(), timestamps, values);
-//  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/TestMetricSeriesGenerator.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/TestMetricSeriesGenerator.java
deleted file mode 100644
index 3b2605b..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/TestMetricSeriesGenerator.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.testing.utilities;
-
-/**
- * Class which was originally used to send test series from AMS to Spark through Kafka.
- */
-
-public class TestMetricSeriesGenerator {
-  //implements Runnable {
-
-//  private Map<TestSeriesInputRequest, AbstractMetricSeries> configuredSeries = new HashMap<>();
-//  private static final Log LOG = LogFactory.getLog(TestMetricSeriesGenerator.class);
-//  private TimelineMetricStore metricStore;
-//  private String hostname;
-//
-//  public TestMetricSeriesGenerator(TimelineMetricStore metricStore) {
-//    this.metricStore = metricStore;
-//    try {
-//      this.hostname = InetAddress.getLocalHost().getHostName();
-//    } catch (UnknownHostException e) {
-//      e.printStackTrace();
-//    }
-//  }
-//
-//  public void addSeries(TestSeriesInputRequest inputRequest) {
-//    if (!configuredSeries.containsKey(inputRequest)) {
-//      AbstractMetricSeries metricSeries = MetricSeriesGeneratorFactory.generateSeries(inputRequest.getSeriesType(), inputRequest.getConfigs());
-//      configuredSeries.put(inputRequest, metricSeries);
-//      LOG.info("Added series " + inputRequest.getSeriesName());
-//    }
-//  }
-//
-//  public void removeSeries(String seriesName) {
-//    boolean isPresent = false;
-//    TestSeriesInputRequest tbd = null;
-//    for (TestSeriesInputRequest inputRequest : configuredSeries.keySet()) {
-//      if (inputRequest.getSeriesName().equals(seriesName)) {
-//        isPresent = true;
-//        tbd = inputRequest;
-//      }
-//    }
-//    if (isPresent) {
-//      LOG.info("Removing series " + seriesName);
-//      configuredSeries.remove(tbd);
-//    } else {
-//      LOG.info("Series not found : " + seriesName);
-//    }
-//  }
-//
-//  @Override
-//  public void run() {
-//    long currentTime = System.currentTimeMillis();
-//    TimelineMetrics timelineMetrics = new TimelineMetrics();
-//
-//    for (TestSeriesInputRequest input : configuredSeries.keySet()) {
-//      AbstractMetricSeries metricSeries = configuredSeries.get(input);
-//      TimelineMetric timelineMetric = new TimelineMetric();
-//      timelineMetric.setMetricName(input.getSeriesName());
-//      timelineMetric.setAppId("anomaly-engine-test-metric");
-//      timelineMetric.setInstanceId(null);
-//      timelineMetric.setStartTime(currentTime);
-//      timelineMetric.setHostName(hostname);
-//      TreeMap<Long, Double> metricValues = new TreeMap();
-//      metricValues.put(currentTime, metricSeries.nextValue());
-//      timelineMetric.setMetricValues(metricValues);
-//      timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
-//      LOG.info("Emitting metric with appId = " + timelineMetric.getAppId());
-//    }
-//    try {
-//      LOG.info("Publishing test metrics for " + timelineMetrics.getMetrics().size() + " series.");
-//      metricStore.putMetrics(timelineMetrics);
-//    } catch (Exception e) {
-//      LOG.error(e);
-//    }
-//  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/TestSeriesInputRequest.java b/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/TestSeriesInputRequest.java
deleted file mode 100644
index d7db9ca..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/java/org/apache/ambari/metrics/adservice/prototype/testing/utilities/TestSeriesInputRequest.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype.testing.utilities;
-
-import org.apache.htrace.fasterxml.jackson.core.JsonProcessingException;
-import org.apache.htrace.fasterxml.jackson.databind.ObjectMapper;
-
-import javax.xml.bind.annotation.XmlRootElement;
-import java.util.Collections;
-import java.util.Map;
-
-@XmlRootElement
-public class TestSeriesInputRequest {
-
-  private String seriesName;
-  private String seriesType;
-  private Map<String, String> configs;
-
-  public TestSeriesInputRequest() {
-  }
-
-  public TestSeriesInputRequest(String seriesName, String seriesType, Map<String, String> configs) {
-    this.seriesName = seriesName;
-    this.seriesType = seriesType;
-    this.configs = configs;
-  }
-
-  public String getSeriesName() {
-    return seriesName;
-  }
-
-  public void setSeriesName(String seriesName) {
-    this.seriesName = seriesName;
-  }
-
-  public String getSeriesType() {
-    return seriesType;
-  }
-
-  public void setSeriesType(String seriesType) {
-    this.seriesType = seriesType;
-  }
-
-  public Map<String, String> getConfigs() {
-    return configs;
-  }
-
-  public void setConfigs(Map<String, String> configs) {
-    this.configs = configs;
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    return (o instanceof TestSeriesInputRequest)
-      && ((TestSeriesInputRequest) o).getSeriesName().equals(this.getSeriesName());
-  }
-
-  @Override
-  public int hashCode() {
-    return seriesName.hashCode();
-  }
-
-  public static void main(String[] args) {
-
-    ObjectMapper objectMapper = new ObjectMapper();
-    TestSeriesInputRequest testSeriesInputRequest = new TestSeriesInputRequest("test", "ema", Collections.singletonMap("key","value"));
-    try {
-      System.out.print(objectMapper.writeValueAsString(testSeriesInputRequest));
-    } catch (JsonProcessingException e) {
-      e.printStackTrace();
-    }
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/ema.R b/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/ema.R
deleted file mode 100644
index 0b66095..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/ema.R
+++ /dev/null
@@ -1,96 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#  EMA <- w * EMA + (1 - w) * x
-# EMS <- sqrt( w * EMS^2 + (1 - w) * (x - EMA)^2 )
-# Alarm = abs(x - EMA) > n * EMS
-
-ema_global <- function(train_data, test_data, w, n) {
-  
-#  res <- get_data(url)
-#  data <- data.frame(as.numeric(names(res$metrics[[1]]$metrics)), as.numeric(res$metrics[[1]]$metrics))
-#  names(data) <- c("TS", res$metrics[[1]]$metricname)
-#  train_data <- data[which(data$TS >= train_start & data$TS <= train_end), 2]
-#  test_data <- data[which(data$TS >= test_start & data$TS <= test_end), ]
-  
-  anomalies <- data.frame()
-  ema <- 0
-  ems <- 0
-
-  #Train Step
-  for (x in train_data) {
-    ema <- w*ema + (1-w)*x
-    ems <- sqrt(w* ems^2 + (1 - w)*(x - ema)^2)
-  }
-  
-  for ( i in 1:length(test_data[,1])) {
-    x <- test_data[i,2]
-    if (abs(x - ema) > n*ems) {
-      anomaly <- c(as.numeric(test_data[i,1]), x)
-      # print (anomaly)
-      anomalies <- rbind(anomalies, anomaly)
-    }
-    ema <- w*ema + (1-w)*x
-    ems <- sqrt(w* ems^2 + (1 - w)*(x - ema)^2)
-  }
-  
-  if(length(anomalies) > 0) {
-    names(anomalies) <- c("TS", "Value")
-  }
-  return (anomalies)
-}
-
-ema_daily <- function(train_data, test_data, w, n) {
-  
-#  res <- get_data(url)
-#  data <- data.frame(as.numeric(names(res$metrics[[1]]$metrics)), as.numeric(res$metrics[[1]]$metrics))
-#  names(data) <- c("TS", res$metrics[[1]]$metricname)
-#  train_data <- data[which(data$TS >= train_start & data$TS <= train_end), ]
-#  test_data <- data[which(data$TS >= test_start & data$TS <= test_end), ]
-  
-  anomalies <- data.frame()
-  ema <- vector("numeric", 7)
-  ems <- vector("numeric", 7)
-  
-  #Train Step
-  for ( i in 1:length(train_data[,1])) {
-    x <- train_data[i,2]
-    time <- as.POSIXlt(as.numeric(train_data[i,1])/1000, origin = "1970-01-01" ,tz = "GMT")
-    index <- time$wday
-    ema[index+1] <- w*ema[index+1] + (1-w)*x
-    ems[index+1] <- sqrt(w* ems[index+1]^2 + (1 - w)*(x - ema[index+1])^2)
-  }
-  
-  for ( i in 1:length(test_data[,1])) {
-    x <- test_data[i,2]
-    time <- as.POSIXlt(as.numeric(test_data[i,1])/1000, origin = "1970-01-01" ,tz = "GMT")
-    index <- time$wday
-    
-    if (abs(x - ema[index+1]) > n*ems[index+1]) {
-      anomaly <- c(as.numeric(test_data[i,1]), x)
-      # print (anomaly)
-      anomalies <- rbind(anomalies, anomaly)
-    }
-    ema[index+1] <- w*ema[index+1] + (1-w)*x
-    ems[index+1] <- sqrt(w* ems[index+1]^2 + (1 - w)*(x - ema[index+1])^2)
-  }
-  
-  if(length(anomalies) > 0) {
-    names(anomalies) <- c("TS", "Value")
-  }
-  return(anomalies)
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/hsdev.r b/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/hsdev.r
deleted file mode 100644
index bca3366..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/hsdev.r
+++ /dev/null
@@ -1,67 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-hsdev_daily <- function(train_data, test_data, n, num_historic_periods, interval, period) {
-
-  #res <- get_data(url)
-  #data <- data.frame(as.numeric(names(res$metrics[[1]]$metrics)), as.numeric(res$metrics[[1]]$metrics))
-  #names(data) <- c("TS", res$metrics[[1]]$metricname)
-  anomalies <- data.frame()
-
-  granularity <- train_data[2,1] - train_data[1,1]
-  test_start <- test_data[1,1]
-  test_end <- test_data[length(test_data[,1]),1]
-  train_start <- test_start - num_historic_periods*period
-  # round to start of day
-  train_start <- train_start - (train_start %% interval)
-
-  time <- as.POSIXlt(as.numeric(test_data[1,1])/1000, origin = "1970-01-01" ,tz = "GMT")
-  test_data_day <- time$wday
-
-  h_data <- c()
-  for ( i in length(train_data[,1]):1) {
-    ts <- train_data[i,1]
-    if ( ts < train_start) {
-      break
-    }
-    time <- as.POSIXlt(as.numeric(ts)/1000, origin = "1970-01-01" ,tz = "GMT")
-    if (time$wday == test_data_day) {
-      x <- train_data[i,2]
-      h_data <- c(h_data, x)
-    }
-  }
-
-  if (length(h_data) < 2*length(test_data[,1])) {
-    cat ("\nNot enough training data")
-    return (anomalies)
-  }
-
-  past_median <- median(h_data)
-  past_sd <- sd(h_data)
-  curr_median <- median(test_data[,2])
-
-  if (abs(curr_median - past_median) > n * past_sd) {
-    anomaly <- c(test_start, test_end, curr_median, past_median, past_sd)
-    anomalies <- rbind(anomalies, anomaly)
-  }
-
-  if(length(anomalies) > 0) {
-    names(anomalies) <- c("TS Start", "TS End", "Current Median", "Past Median", "Past SD")
-  }
-
-  return (anomalies)
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/iforest.R b/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/iforest.R
deleted file mode 100644
index 8956400..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/iforest.R
+++ /dev/null
@@ -1,52 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-ams_iforest <- function(url, train_start, train_end, test_start, test_end, threshold_score) {
-  
-  res <- get_data(url)
-  num_metrics <- length(res$metrics)
-  anomalies <- data.frame()
-  
-  metricname <- res$metrics[[1]]$metricname
-  data <- data.frame(as.numeric(names(res$metrics[[1]]$metrics)), as.numeric(res$metrics[[1]]$metrics))
-  names(data) <- c("TS", res$metrics[[1]]$metricname)
-
-  for (i in 2:num_metrics) {
-    metricname <- res$metrics[[i]]$metricname
-    df <- data.frame(as.numeric(names(res$metrics[[i]]$metrics)), as.numeric(res$metrics[[i]]$metrics))
-    names(df) <- c("TS", res$metrics[[i]]$metricname)
-    data <- merge(data, df)
-  }
-  
-  algo_data <- data[ which(data$TS >= train_start & data$TS <= train_end) , ][c(1:num_metrics+1)]
-  iForest <- IsolationTrees(algo_data)
-  test_data <- data[ which(data$TS >= test_start & data$TS <= test_end) , ]
-  
-  if_res <- AnomalyScore(test_data[c(1:num_metrics+1)], iForest)
-  for (i in 1:length(if_res$outF)) {
-    index <- test_start+i-1
-    if (if_res$outF[i] > threshold_score) {
-      anomaly <- c(test_data[i,1], if_res$outF[i], if_res$pathLength[i])
-      anomalies <- rbind(anomalies, anomaly)
-    } 
-  }
-  
-  if(length(anomalies) > 0) {
-    names(anomalies) <- c("TS", "Anomaly Score", "Path length")
-  }
-  return (anomalies)
-}
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/kstest.r b/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/kstest.r
deleted file mode 100644
index f22bc15..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/kstest.r
+++ /dev/null
@@ -1,38 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-ams_ks <- function(train_data, test_data, p_value) {
-  
-#  res <- get_data(url)
-#  data <- data.frame(as.numeric(names(res$metrics[[1]]$metrics)), as.numeric(res$metrics[[1]]$metrics))
-#  names(data) <- c("TS", res$metrics[[1]]$metricname)
-#  train_data <- data[which(data$TS >= train_start & data$TS <= train_end), 2]
-#  test_data <- data[which(data$TS >= test_start & data$TS <= test_end), 2]
-  
-  anomalies <- data.frame()
-  res <- ks.test(train_data[,2], test_data[,2])
-  
-  if (res$p.value < p_value) {
-    anomaly <- c(test_data[1,1], test_data[nrow(test_data),1], res$statistic, res$p.value)
-    anomalies <- rbind(anomalies, anomaly)
-  }
- 
-  if(length(anomalies) > 0) {
-    names(anomalies) <- c("TS Start", "TS End", "D", "p-value")
-  }
-  return (anomalies)
-}
\ No newline at end of file
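kstest.r delegates the two-sample test to R's ks.test. On the JVM side a comparable check could be sketched with Apache Commons Math, assuming commons-math3 is available on the classpath; only KolmogorovSmirnovTest and the two methods called on it come from that library, the wrapper class and threshold handling are illustrative:

    import org.apache.commons.math3.stat.inference.KolmogorovSmirnovTest;

    // Illustrative sketch only: KolmogorovSmirnovTest comes from commons-math3;
    // the wrapper class and threshold handling are not from the removed code.
    public class KsCheckSketch {

      // Returns true when the test window's distribution differs from the training window
      // at the given significance level (e.g. 0.05, as in KSTechnique).
      public static boolean isAnomalous(double[] trainValues, double[] testValues, double pValueThreshold) {
        KolmogorovSmirnovTest ks = new KolmogorovSmirnovTest();
        double p = ks.kolmogorovSmirnovTest(trainValues, testValues);      // two-sample p-value
        double d = ks.kolmogorovSmirnovStatistic(trainValues, testValues); // D statistic
        System.out.println("D = " + d + ", p-value = " + p);
        return p < pValueThreshold;
      }
    }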
diff --git a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/test.R b/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/test.R
deleted file mode 100644
index 7650356..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/test.R
+++ /dev/null
@@ -1,85 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-tukeys_anomalies <- data.frame()
-ema_global_anomalies <- data.frame()
-ema_daily_anomalies <- data.frame()
-ks_anomalies <- data.frame()
-hsdev_anomalies <- data.frame()
-
-init <- function() {
-  tukeys_anomalies <- data.frame()
-  ema_global_anomalies <- data.frame()
-  ema_daily_anomalies <- data.frame()
-  ks_anomalies <- data.frame()
-  hsdev_anomalies <- data.frame()
-}
-
-test_methods <- function(data) {
-
-  init()
-  #res <- get_data(url)
-  #data <- data.frame(as.numeric(names(res$metrics[[1]]$metrics)), as.numeric(res$metrics[[1]]$metrics))
-  #names(data) <- c("TS", res$metrics[[1]]$metricname)
-
-  limit <- data[length(data[,1]),1]
-  step <- data[2,1] - data[1,1]
-
-  train_start <- data[1,1]
-  train_end <- get_next_day_boundary(train_start, step, limit)
-  test_start <- train_end + step
-  test_end <- get_next_day_boundary(test_start, step, limit)
-  i <- 1
-  day <- 24*60*60*1000
-
-  while (test_start < limit) {
-
-    print (i)
-    i <- i + 1
-    train_data <- data[which(data$TS >= train_start & data$TS <= train_end),]
-    test_data <- data[which(data$TS >= test_start & data$TS <= test_end), ]
-
-    #tukeys_anomalies <<- rbind(tukeys_anomalies, ams_tukeys(train_data, test_data, 3))
-    #ema_global_anomalies <<- rbind(ema_global_anomalies, ema_global(train_data, test_data, 0.9, 3))
-    #ema_daily_anomalies <<- rbind(ema_daily_anomalies, ema_daily(train_data, test_data, 0.9, 3))
-    #ks_anomalies <<- rbind(ks_anomalies, ams_ks(train_data, test_data, 0.05))
-    hsdev_train_data <- data[which(data$TS < test_start),]
-    hsdev_anomalies <<- rbind(hsdev_anomalies, hsdev_daily(hsdev_train_data, test_data, 3, 3, day, 7*day))
-
-    train_start <- test_start
-    train_end <- get_next_day_boundary(train_start, step, limit)
-    test_start <- train_end + step
-    test_end <- get_next_day_boundary(test_start, step, limit)
-  }
-  return (hsdev_anomalies)
-}
-
-get_next_day_boundary <- function(start, step, limit) {
-
-  if (start > limit) {
-    return (-1)
-  }
-
-  while (start <= limit) {
-    if (((start %% (24*60*60*1000)) - 28800000) == 0) {
-      return (start)
-    }
-    start <- start + step
-  }
-  return (start)
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/tukeys.r b/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/tukeys.r
deleted file mode 100644
index 0312226..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/resources/R-scripts/tukeys.r
+++ /dev/null
@@ -1,51 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-ams_tukeys <- function(train_data, test_data, n) {
-
-#  res <- get_data(url)
-#  data <- data.frame(as.numeric(names(res$metrics[[1]]$metrics)), as.numeric(res$metrics[[1]]$metrics))
-#  names(data) <- c("TS", res$metrics[[1]]$metricname)
-#  train_data <- data[which(data$TS >= train_start & data$TS <= train_end), 2]
-#  test_data <- data[which(data$TS >= test_start & data$TS <= test_end), ]
-
-  anomalies <- data.frame()
-  quantiles <- quantile(train_data[,2])
-  iqr <- quantiles[4] - quantiles[2]
-  niqr <- 0
-
-  for ( i in 1:length(test_data[,1])) {
-    x <- test_data[i,2]
-    lb <- quantiles[2] - n*iqr
-    ub <- quantiles[4] + n*iqr
-    if ( (x < lb)  || (x > ub) ) {
-      if (iqr != 0) {
-        if (x < lb) {
-          niqr <- (quantiles[2] - x) / iqr
-        } else {
-          niqr <- (x - quantiles[4]) / iqr
-        }
-      }
-      anomaly <- c(test_data[i,1], x, niqr)
-      anomalies <- rbind(anomalies, anomaly)
-    }
-  }
-  if(length(anomalies) > 0) {
-    names(anomalies) <- c("TS", "Value", "niqr")
-  }
-  return (anomalies)
-}
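tukeys.r applies Tukey's fences: a point is anomalous when it falls outside [Q1 - n*IQR, Q3 + n*IQR] computed over the training window, and niqr records how many IQRs outside the fence it lies. A hedged Java sketch of the same rule (the nearest-rank quartile approximation and all names below are illustrative, not from the removed script):

    import java.util.Arrays;

    // Illustrative sketch only: names and the nearest-rank quartile approximation are not from the removed script.
    public final class TukeysSketch {

      // Returns how many IQRs the value lies outside [Q1 - n*IQR, Q3 + n*IQR], or 0 when it is inside
      // (or when the IQR of the training window is 0).
      public static double niqr(double[] trainValues, double x, double n) {
        double[] sorted = trainValues.clone();
        Arrays.sort(sorted);
        double q1 = sorted[(int) Math.floor(0.25 * (sorted.length - 1))];
        double q3 = sorted[(int) Math.floor(0.75 * (sorted.length - 1))];
        double iqr = q3 - q1;
        double lb = q1 - n * iqr;
        double ub = q3 + n * iqr;
        if (iqr == 0 || (x >= lb && x <= ub)) {
          return 0.0;
        }
        return x < lb ? (q1 - x) / iqr : (x - q3) / iqr;
      }
    }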
diff --git a/ambari-metrics-anomaly-detection-service/src/main/resources/hbase-site.xml b/ambari-metrics-anomaly-detection-service/src/main/resources/hbase-site.xml
deleted file mode 100644
index 66f0454..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/resources/hbase-site.xml
+++ /dev/null
@@ -1,286 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration>
-    
-    <property>
-      <name>dfs.client.read.shortcircuit</name>
-      <value>true</value>
-    </property>
-    
-    <property>
-      <name>hbase.client.scanner.caching</name>
-      <value>10000</value>
-    </property>
-    
-    <property>
-      <name>hbase.client.scanner.timeout.period</name>
-      <value>300000</value>
-    </property>
-    
-    <property>
-      <name>hbase.cluster.distributed</name>
-      <value>false</value>
-    </property>
-    
-    <property>
-      <name>hbase.hregion.majorcompaction</name>
-      <value>0</value>
-    </property>
-    
-    <property>
-      <name>hbase.hregion.max.filesize</name>
-      <value>4294967296</value>
-    </property>
-    
-    <property>
-      <name>hbase.hregion.memstore.block.multiplier</name>
-      <value>4</value>
-    </property>
-    
-    <property>
-      <name>hbase.hregion.memstore.flush.size</name>
-      <value>134217728</value>
-    </property>
-    
-    <property>
-      <name>hbase.hstore.blockingStoreFiles</name>
-      <value>200</value>
-    </property>
-    
-    <property>
-      <name>hbase.hstore.flusher.count</name>
-      <value>2</value>
-    </property>
-    
-    <property>
-      <name>hbase.local.dir</name>
-      <value>${hbase.tmp.dir}/local</value>
-    </property>
-    
-    <property>
-      <name>hbase.master.info.bindAddress</name>
-      <value>0.0.0.0</value>
-    </property>
-    
-    <property>
-      <name>hbase.master.info.port</name>
-      <value>61310</value>
-    </property>
-    
-    <property>
-      <name>hbase.master.normalizer.class</name>
-      <value>org.apache.hadoop.hbase.master.normalizer.SimpleRegionNormalizer</value>
-    </property>
-    
-    <property>
-      <name>hbase.master.port</name>
-      <value>61300</value>
-    </property>
-    
-    <property>
-      <name>hbase.master.wait.on.regionservers.mintostart</name>
-      <value>1</value>
-    </property>
-    
-    <property>
-      <name>hbase.normalizer.enabled</name>
-      <value>false</value>
-    </property>
-    
-    <property>
-      <name>hbase.normalizer.period</name>
-      <value>600000</value>
-    </property>
-    
-    <property>
-      <name>hbase.regionserver.global.memstore.lowerLimit</name>
-      <value>0.3</value>
-    </property>
-    
-    <property>
-      <name>hbase.regionserver.global.memstore.upperLimit</name>
-      <value>0.35</value>
-    </property>
-    
-    <property>
-      <name>hbase.regionserver.info.port</name>
-      <value>61330</value>
-    </property>
-    
-    <property>
-      <name>hbase.regionserver.port</name>
-      <value>61320</value>
-    </property>
-    
-    <property>
-      <name>hbase.regionserver.thread.compaction.large</name>
-      <value>2</value>
-    </property>
-    
-    <property>
-      <name>hbase.regionserver.thread.compaction.small</name>
-      <value>3</value>
-    </property>
-    
-    <property>
-      <name>hbase.replication</name>
-      <value>false</value>
-    </property>
-    
-    <property>
-      <name>hbase.rootdir</name>
-      <value>file:///var/lib/ambari-metrics-collector/hbase</value>
-    </property>
-    
-    <property>
-      <name>hbase.rpc.timeout</name>
-      <value>300000</value>
-    </property>
-    
-    <property>
-      <name>hbase.snapshot.enabled</name>
-      <value>false</value>
-    </property>
-    
-    <property>
-      <name>hbase.superuser</name>
-      <value>activity_explorer,activity_analyzer</value>
-    </property>
-    
-    <property>
-      <name>hbase.tmp.dir</name>
-      <value>/var/lib/ambari-metrics-collector/hbase-tmp</value>
-    </property>
-    
-    <property>
-      <name>hbase.zookeeper.leaderport</name>
-      <value>61388</value>
-    </property>
-    
-    <property>
-      <name>hbase.zookeeper.peerport</name>
-      <value>61288</value>
-    </property>
-    
-    <property>
-      <name>hbase.zookeeper.property.clientPort</name>
-      <value>61181</value>
-    </property>
-    
-    <property>
-      <name>hbase.zookeeper.property.dataDir</name>
-      <value>${hbase.tmp.dir}/zookeeper</value>
-    </property>
-    
-    <property>
-      <name>hbase.zookeeper.property.tickTime</name>
-      <value>6000</value>
-    </property>
-    
-    <property>
-      <name>hbase.zookeeper.quorum</name>
-      <value>c6401.ambari.apache.org</value>
-      <final>true</final>
-    </property>
-    
-    <property>
-      <name>hfile.block.cache.size</name>
-      <value>0.3</value>
-    </property>
-    
-    <property>
-      <name>phoenix.coprocessor.maxMetaDataCacheSize</name>
-      <value>20480000</value>
-    </property>
-    
-    <property>
-      <name>phoenix.coprocessor.maxServerCacheTimeToLiveMs</name>
-      <value>60000</value>
-    </property>
-    
-    <property>
-      <name>phoenix.groupby.maxCacheSize</name>
-      <value>307200000</value>
-    </property>
-    
-    <property>
-      <name>phoenix.mutate.batchSize</name>
-      <value>10000</value>
-    </property>
-    
-    <property>
-      <name>phoenix.query.keepAliveMs</name>
-      <value>300000</value>
-    </property>
-    
-    <property>
-      <name>phoenix.query.maxGlobalMemoryPercentage</name>
-      <value>15</value>
-    </property>
-    
-    <property>
-      <name>phoenix.query.rowKeyOrderSaltedTable</name>
-      <value>true</value>
-    </property>
-    
-    <property>
-      <name>phoenix.query.spoolThresholdBytes</name>
-      <value>20971520</value>
-    </property>
-    
-    <property>
-      <name>phoenix.query.timeoutMs</name>
-      <value>300000</value>
-    </property>
-    
-    <property>
-      <name>phoenix.sequence.saltBuckets</name>
-      <value>2</value>
-    </property>
-    
-    <property>
-      <name>phoenix.spool.directory</name>
-      <value>${hbase.tmp.dir}/phoenix-spool</value>
-    </property>
-    
-    <property>
-      <name>zookeeper.session.timeout</name>
-      <value>120000</value>
-    </property>
-    
-    <property>
-      <name>zookeeper.session.timeout.localHBaseCluster</name>
-      <value>120000</value>
-    </property>
-    
-    <property>
-      <name>zookeeper.znode.parent</name>
-      <value>/ams-hbase-unsecure</value>
-    </property>
-
-    <property>
-      <name>hbase.use.dynamic.jars</name>
-      <value>false</value>
-    </property>
-
-  </configuration>
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/resources/input-config.properties b/ambari-metrics-anomaly-detection-service/src/main/resources/input-config.properties
deleted file mode 100644
index ab106c4..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/resources/input-config.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2011 The Apache Software Foundation
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-appIds=HOST
-
-collectorHost=localhost
-collectorPort=6188
-collectorProtocol=http
-
-zkQuorum=localhost:2181
-
-ambariServerHost=localhost
-clusterName=c1
-
-emaW=0.8
-emaN=3
-tukeysN=3
-pointInTimeTestInterval=300000
-pointInTimeTrainInterval=900000
-
-ksTestInterval=600000
-ksTrainInterval=600000
-hsdevNhp=3
-hsdevInterval=1800000
-
-skipMetricPatterns=sdisk*,cpu_sintr*,proc*,disk*,boottime
-hosts=avijayan-ad-1.openstacklocal
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/ADServiceScalaModule.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/ADServiceScalaModule.scala
deleted file mode 100644
index 8578a80..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/ADServiceScalaModule.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.app
-
-import com.fasterxml.jackson.module.scala._
-import com.fasterxml.jackson.module.scala.deser.{ScalaNumberDeserializersModule, UntypedObjectDeserializerModule}
-import com.fasterxml.jackson.module.scala.introspect.{ScalaAnnotationIntrospector, ScalaAnnotationIntrospectorModule}
-
-/**
-  * Extended Jackson Module that fixes the Scala-Jackson BytecodeReadingParanamer issue.
-  */
-class ADServiceScalaModule extends JacksonModule
-  with IteratorModule
-  with EnumerationModule
-  with OptionModule
-  with SeqModule
-  with IterableModule
-  with TupleModule
-  with MapModule
-  with SetModule
-  with FixedScalaAnnotationIntrospectorModule
-  with UntypedObjectDeserializerModule
-  with EitherModule {
-
-  override def getModuleName = "ADServiceScalaModule"
-
-  object ADServiceScalaModule extends ADServiceScalaModule
-
-}
-
-
-trait FixedScalaAnnotationIntrospectorModule extends JacksonModule {
-  this += { _.appendAnnotationIntrospector(ScalaAnnotationIntrospector) }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionApp.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionApp.scala
deleted file mode 100644
index 2d0dbdf..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionApp.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.app
-
-import javax.ws.rs.Path
-import javax.ws.rs.container.{ContainerRequestFilter, ContainerResponseFilter}
-
-import org.apache.ambari.metrics.adservice.app.GuiceInjector.{withInjector, wrap}
-import org.apache.ambari.metrics.adservice.db.{AdAnomalyStoreAccessor, MetadataDatasource}
-import org.apache.ambari.metrics.adservice.metadata.MetricDefinitionService
-import org.apache.ambari.metrics.adservice.service.ADQueryService
-import org.glassfish.jersey.filter.LoggingFilter
-
-import com.codahale.metrics.health.HealthCheck
-import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
-import com.fasterxml.jackson.datatype.joda.JodaModule
-import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
-
-import io.dropwizard.Application
-import io.dropwizard.setup.Environment
-
-class AnomalyDetectionApp extends Application[AnomalyDetectionAppConfig] {
-  override def getName = "anomaly-detection-service"
-
-  override def run(t: AnomalyDetectionAppConfig, env: Environment): Unit = {
-    configure(t, env)
-  }
-
-  def configure(config: AnomalyDetectionAppConfig, env: Environment) {
-    withInjector(new AnomalyDetectionAppModule(config, env)) { injector =>
-      injector.instancesWithAnnotation(classOf[Path]).foreach { r => env.jersey().register(r) }
-      injector.instancesOfType(classOf[HealthCheck]).foreach { h => env.healthChecks.register(h.getClass.getName, h) }
-      injector.instancesOfType(classOf[ContainerRequestFilter]).foreach { f => env.jersey().register(f) }
-      injector.instancesOfType(classOf[ContainerResponseFilter]).foreach { f => env.jersey().register(f) }
-
-      //Initialize Services
-      injector.getInstance(classOf[MetadataDatasource]).initialize
-      injector.getInstance(classOf[MetricDefinitionService]).initialize
-      injector.getInstance(classOf[ADQueryService]).initialize
-    }
-    env.jersey.register(jacksonJaxbJsonProvider)
-    env.jersey.register(new LoggingFilter)
-  }
-
-  private def jacksonJaxbJsonProvider: JacksonJaxbJsonProvider = {
-    val provider = new JacksonJaxbJsonProvider()
-    val objectMapper = new ObjectMapper()
-    objectMapper.registerModule(new ADServiceScalaModule)
-    objectMapper.registerModule(new JodaModule)
-    objectMapper.configure(SerializationFeature.WRAP_ROOT_VALUE, false)
-    objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false)
-    objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
-    objectMapper.configure(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS, true)
-    provider.setMapper(objectMapper)
-    provider
-  }
-
-  override def bootstrapLogging(): Unit = {}
-}
-
-
-object AnomalyDetectionApp {
-  def main(args: Array[String]): Unit = new AnomalyDetectionApp().run(args: _*)
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionAppConfig.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionAppConfig.scala
deleted file mode 100644
index 58efa97..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionAppConfig.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.app
-
-import javax.validation.Valid
-
-import org.apache.ambari.metrics.adservice.configuration.{HBaseConfiguration, _}
-
-import com.fasterxml.jackson.annotation.{JsonIgnore, JsonIgnoreProperties, JsonProperty}
-
-import io.dropwizard.Configuration
-
-/**
-  * Top Level AD System Manager config items.
-  */
-@JsonIgnoreProperties(ignoreUnknown=true)
-class AnomalyDetectionAppConfig extends Configuration {
-
-  /*
-   Metric Definition Service configuration
-    */
-  @Valid
-  private val metricDefinitionServiceConfiguration = new MetricDefinitionServiceConfiguration
-
-  @Valid
-  private val metricCollectorConfiguration = new MetricCollectorConfiguration
-
-  /*
-   Anomaly Query Service configuration
-    */
-  @Valid
-  private val adServiceConfiguration = new AdServiceConfiguration
-
-  /**
-    * LevelDB settings for metrics definitions
-    */
-  @Valid
-  private val metricDefinitionDBConfiguration = new MetricDefinitionDBConfiguration
-
-  /**
-    * Spark configurations
-    */
-  @Valid
-  private val sparkConfiguration = new SparkConfiguration
-
-  /*
-   AMS HBase Conf
-    */
-  @JsonIgnore
-  def getHBaseConf : org.apache.hadoop.conf.Configuration = {
-    HBaseConfiguration.getHBaseConf
-  }
-
-  @JsonProperty("metricDefinitionService")
-  def getMetricDefinitionServiceConfiguration: MetricDefinitionServiceConfiguration = {
-    metricDefinitionServiceConfiguration
-  }
-
-  @JsonProperty("adQueryService")
-  def getAdServiceConfiguration: AdServiceConfiguration = {
-    adServiceConfiguration
-  }
-
-  @JsonProperty("metricsCollector")
-  def getMetricCollectorConfiguration: MetricCollectorConfiguration = metricCollectorConfiguration
-
-  @JsonProperty("metricDefinitionDB")
-  def getMetricDefinitionDBConfiguration: MetricDefinitionDBConfiguration = metricDefinitionDBConfiguration
-
-  @JsonProperty("spark")
-  def getSparkConfiguration: SparkConfiguration = sparkConfiguration
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionAppModule.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionAppModule.scala
deleted file mode 100644
index 68e9df9..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionAppModule.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.app
-
-import org.apache.ambari.metrics.adservice.db._
-import org.apache.ambari.metrics.adservice.leveldb.LevelDBDataSource
-import org.apache.ambari.metrics.adservice.metadata.{MetricDefinitionService, MetricDefinitionServiceImpl}
-import org.apache.ambari.metrics.adservice.resource.{AnomalyResource, MetricDefinitionResource, RootResource}
-import org.apache.ambari.metrics.adservice.service.{ADQueryService, ADQueryServiceImpl}
-
-import com.codahale.metrics.health.HealthCheck
-import com.google.inject.AbstractModule
-import com.google.inject.multibindings.Multibinder
-
-import io.dropwizard.setup.Environment
-
-class AnomalyDetectionAppModule(config: AnomalyDetectionAppConfig, env: Environment) extends AbstractModule {
-  override def configure() {
-    bind(classOf[AnomalyDetectionAppConfig]).toInstance(config)
-    bind(classOf[Environment]).toInstance(env)
-    val healthCheckBinder = Multibinder.newSetBinder(binder(), classOf[HealthCheck])
-    healthCheckBinder.addBinding().to(classOf[DefaultHealthCheck])
-    bind(classOf[AnomalyResource])
-    bind(classOf[MetricDefinitionResource])
-    bind(classOf[RootResource])
-    bind(classOf[AdMetadataStoreAccessor]).to(classOf[AdMetadataStoreAccessorImpl])
-    bind(classOf[ADQueryService]).to(classOf[ADQueryServiceImpl])
-    bind(classOf[MetricDefinitionService]).to(classOf[MetricDefinitionServiceImpl])
-    bind(classOf[MetadataDatasource]).to(classOf[LevelDBDataSource])
-    bind(classOf[AdAnomalyStoreAccessor]).to(classOf[PhoenixAnomalyStoreAccessor])
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/DefaultHealthCheck.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/DefaultHealthCheck.scala
deleted file mode 100644
index c36e8d2..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/DefaultHealthCheck.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.app
-
-import com.codahale.metrics.health.HealthCheck
-import com.codahale.metrics.health.HealthCheck.Result
-
-class DefaultHealthCheck extends HealthCheck {
-  override def check(): Result = Result.healthy()
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/GuiceInjector.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/GuiceInjector.scala
deleted file mode 100644
index 37da5f9..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/app/GuiceInjector.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.app
-
-import java.lang.annotation.Annotation
-
-import com.google.inject.{Guice, Injector, Module, TypeLiteral}
-
-import scala.collection.JavaConversions._
-import scala.language.implicitConversions
-import scala.reflect._
-
-object GuiceInjector {
-
-  def withInjector(modules: Module*)(fn: (Injector) => Unit) = {
-    val injector = Guice.createInjector(modules.toList: _*)
-    fn(injector)
-  }
-
-  implicit def wrap(injector: Injector): InjectorWrapper = new InjectorWrapper(injector)
-}
-
-class InjectorWrapper(injector: Injector) {
-  def instancesWithAnnotation[T <: Annotation](annotationClass: Class[T]): List[AnyRef] = {
-    injector.getAllBindings.filter { case (k, v) =>
-      !k.getTypeLiteral.getRawType.getAnnotationsByType[T](annotationClass).isEmpty
-    }.map { case (k, v) => injector.getInstance(k).asInstanceOf[AnyRef] }.toList
-  }
-
-  def instancesOfType[T: ClassTag](typeClass: Class[T]): List[T] = {
-    injector.findBindingsByType(TypeLiteral.get(classTag[T].runtimeClass)).map { b =>
-      injector.getInstance(b.getKey).asInstanceOf[T]
-    }.toList
-  }
-
-  def dumpBindings(): Unit = {
-    injector.getBindings.keySet() foreach { k =>
-      println(s"bind key = ${k.toString}")
-    }
-  }
-}
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/AdServiceConfiguration.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/AdServiceConfiguration.scala
deleted file mode 100644
index 11e9f28..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/AdServiceConfiguration.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.configuration
-
-import javax.validation.constraints.NotNull
-
-import com.fasterxml.jackson.annotation.JsonProperty
-
-/**
-  * Class to get Anomaly Service specific configuration.
-  */
-class AdServiceConfiguration {
-
-  @NotNull
-  var anomalyDataTtl: Long = _
-
-  @JsonProperty
-  def getAnomalyDataTtl: Long = anomalyDataTtl
-
-  @JsonProperty
-  def setAnomalyDataTtl(anomalyDataTtl: Long): Unit = {
-    this.anomalyDataTtl = anomalyDataTtl
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/HBaseConfiguration.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/HBaseConfiguration.scala
deleted file mode 100644
index a95ff15..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/HBaseConfiguration.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.configuration
-
-import java.net.{MalformedURLException, URISyntaxException}
-
-import org.apache.hadoop.conf.Configuration
-import org.slf4j.{Logger, LoggerFactory}
-
-object HBaseConfiguration {
-
-  val HBASE_SITE_CONFIGURATION_FILE: String = "hbase-site.xml"
-  val hbaseConf: org.apache.hadoop.conf.Configuration = new Configuration(true)
-  var isInitialized: Boolean = false
-  val LOG : Logger = LoggerFactory.getLogger("HBaseConfiguration")
-
-  /**
-    * Initialize the hbase conf from hbase-site present in classpath.
-    */
-  def initConfigs(): Unit = {
-    if (!isInitialized) {
-      var classLoader: ClassLoader = Thread.currentThread.getContextClassLoader
-      if (classLoader == null) classLoader = getClass.getClassLoader
-
-      try {
-        val hbaseResUrl = classLoader.getResource(HBASE_SITE_CONFIGURATION_FILE)
-        if (hbaseResUrl == null) throw new IllegalStateException("Unable to initialize the AD subsystem. No hbase-site present in the classpath.")
-
-        hbaseConf.addResource(hbaseResUrl.toURI.toURL)
-        isInitialized = true
-
-      } catch {
-        case me : MalformedURLException => println("MalformedURLException")
-        case ue : URISyntaxException => println("URISyntaxException")
-      }
-    }
-  }
-
-  def getHBaseConf: org.apache.hadoop.conf.Configuration = {
-    if (!isInitialized) {
-      initConfigs()
-    }
-    hbaseConf
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/MetricCollectorConfiguration.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/MetricCollectorConfiguration.scala
deleted file mode 100644
index 2530730..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/MetricCollectorConfiguration.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.configuration
-
-import javax.validation.constraints.NotNull
-
-import com.fasterxml.jackson.annotation.JsonProperty
-
-/**
-  * Class to capture the Metrics Collector related configuration.
-  */
-class MetricCollectorConfiguration {
-
-  @NotNull
-  private var hosts: String = _
-
-  @NotNull
-  private var port: String = _
-
-  @NotNull
-  private var protocol: String = _
-
-  @NotNull
-  private var metadataEndpoint: String = _
-
-  @JsonProperty
-  def getHosts: String = hosts
-
-  @JsonProperty
-  def getPort: String = port
-
-  @JsonProperty
-  def getProtocol: String = protocol
-
-  @JsonProperty
-  def getMetadataEndpoint: String = metadataEndpoint
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/MetricDefinitionDBConfiguration.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/MetricDefinitionDBConfiguration.scala
deleted file mode 100644
index ef4e00c..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/MetricDefinitionDBConfiguration.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.configuration
-
-import javax.validation.constraints.NotNull
-
-import com.fasterxml.jackson.annotation.JsonProperty
-
-class MetricDefinitionDBConfiguration {
-
-  @NotNull
-  private var dbDirPath: String = _
-  private var verifyChecksums: Boolean = true
-  private var performParanoidChecks: Boolean = false
-
-  @JsonProperty("verifyChecksums")
-  def getVerifyChecksums: Boolean = verifyChecksums
-
-  @JsonProperty("performParanoidChecks")
-  def getPerformParanoidChecks: Boolean = performParanoidChecks
-
-  @JsonProperty("dbDirPath")
-  def getDbDirPath: String = dbDirPath
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/MetricDefinitionServiceConfiguration.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/MetricDefinitionServiceConfiguration.scala
deleted file mode 100644
index a453f03..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/MetricDefinitionServiceConfiguration.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.configuration
-
-import com.fasterxml.jackson.annotation.JsonProperty
-
-/**
-  * Class to capture the Metric Definition Service configuration.
-  */
-class MetricDefinitionServiceConfiguration {
-
-  private val inputDefinitionDirectory: String = ""
-
-  @JsonProperty
-  def getInputDefinitionDirectory: String = inputDefinitionDirectory
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/SparkConfiguration.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/SparkConfiguration.scala
deleted file mode 100644
index 30efdc7..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/configuration/SparkConfiguration.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.configuration
-
-import javax.validation.constraints.NotNull
-
-import com.fasterxml.jackson.annotation.JsonProperty
-
-class SparkConfiguration {
-
-  @NotNull
-  private var mode: String = _
-
-  @NotNull
-  private var masterHostPort: String = _
-
-  @JsonProperty
-  def getMode: String = mode
-
-  @JsonProperty
-  def getMasterHostPort: String = masterHostPort
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdAnomalyStoreAccessor.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdAnomalyStoreAccessor.scala
deleted file mode 100644
index 676b09a..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdAnomalyStoreAccessor.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.db
-
-import org.apache.ambari.metrics.adservice.model.AnomalyType.AnomalyType
-import org.apache.ambari.metrics.adservice.model.MetricAnomalyInstance
-
-/**
-  * Trait for anomaly store accessor. (Phoenix)
-  */
-trait AdAnomalyStoreAccessor {
-
-  def initialize(): Unit
-
-  def getMetricAnomalies(anomalyType: AnomalyType,
-                         startTime: Long,
-                         endTime: Long,
-                         limit: Int) : List[MetricAnomalyInstance]
-
-  }
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdMetadataStoreAccessor.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdMetadataStoreAccessor.scala
deleted file mode 100644
index bcdb416..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdMetadataStoreAccessor.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.db
-
-import org.apache.ambari.metrics.adservice.metadata.MetricSourceDefinition
-
-/**
-  * Trait used to talk to the AD Metadata Store.
-  */
-trait AdMetadataStoreAccessor {
-
-  /**
-    * Return all saved component definitions from DB.
-    * @return
-    */
-  def getSavedInputDefinitions: List[MetricSourceDefinition]
-
-  /**
-    * Save a set of component definitions
-    * @param metricSourceDefinitions Set of component definitions
-    * @return Success / Failure
-    */
-  def saveInputDefinitions(metricSourceDefinitions: List[MetricSourceDefinition]) : Boolean
-
-  /**
-    * Save a component definition
-    * @param metricSourceDefinition component definition
-    * @return Success / Failure
-    */
-  def saveInputDefinition(metricSourceDefinition: MetricSourceDefinition) : Boolean
-
-  /**
-    * Delete a component definition
-    * @param definitionName component definition
-    * @return
-    */
-  def removeInputDefinition(definitionName: String) : Boolean
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdMetadataStoreAccessorImpl.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdMetadataStoreAccessorImpl.scala
deleted file mode 100644
index 7405459..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdMetadataStoreAccessorImpl.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.db
-
-import org.apache.ambari.metrics.adservice.metadata.MetricSourceDefinition
-import org.apache.commons.lang.SerializationUtils
-
-import com.google.inject.Inject
-
-/**
-  * Implementation of the AdMetadataStoreAccessor.
-  * Serves as the adaptor between metric definition service and LevelDB worlds.
-  */
-class AdMetadataStoreAccessorImpl extends AdMetadataStoreAccessor {
-
-  @Inject
-  var metadataDataSource: MetadataDatasource = _
-
-  @Inject
-  def this(metadataDataSource: MetadataDatasource) = {
-    this()
-    this.metadataDataSource = metadataDataSource
-  }
-
-  /**
-    * Return all saved component definitions from DB.
-    *
-    * @return
-    */
-  override def getSavedInputDefinitions: List[MetricSourceDefinition] = {
-    val valuesFromStore : List[MetadataDatasource#Value] = metadataDataSource.getAll
-    val definitions = scala.collection.mutable.MutableList.empty[MetricSourceDefinition]
-
-    for (value : Array[Byte] <- valuesFromStore) {
-      val definition : MetricSourceDefinition = SerializationUtils.deserialize(value).asInstanceOf[MetricSourceDefinition]
-      if (definition != null) {
-        definitions.+=(definition)
-      }
-    }
-    definitions.toList
-  }
-
-  /**
-    * Save a set of component definitions
-    *
-    * @param metricSourceDefinitions Set of component definitions
-    * @return Success / Failure
-    */
-  override def saveInputDefinitions(metricSourceDefinitions: List[MetricSourceDefinition]): Boolean = {
-    for (definition <- metricSourceDefinitions) {
-      saveInputDefinition(definition)
-    }
-    true
-  }
-
-  /**
-    * Save a component definition
-    *
-    * @param metricSourceDefinition component definition
-    * @return Success / Failure
-    */
-  override def saveInputDefinition(metricSourceDefinition: MetricSourceDefinition): Boolean = {
-    val storeValue : MetadataDatasource#Value = SerializationUtils.serialize(metricSourceDefinition)
-    val storeKey : MetadataDatasource#Key = metricSourceDefinition.definitionName.getBytes()
-    metadataDataSource.put(storeKey, storeValue)
-    true
-  }
-
-  /**
-    * Delete a component definition
-    *
-    * @param definitionName component definition
-    * @return
-    */
-  override def removeInputDefinition(definitionName: String): Boolean = {
-    val storeKey : MetadataDatasource#Key = definitionName.getBytes()
-    metadataDataSource.delete(storeKey)
-    true
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdMetadataStoreConstants.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdMetadataStoreConstants.scala
deleted file mode 100644
index 3d273a3..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/AdMetadataStoreConstants.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.db
-
-object AdMetadataStoreConstants {
-
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-  /* Table Name constants */
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-  val METRIC_PROFILE_TABLE_NAME = "METRIC_DEFINITION"
-
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-  /* CREATE statement constants */
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-  val CREATE_METRIC_DEFINITION_TABLE: String = "CREATE TABLE IF NOT EXISTS %s (" +
-    "DEFINITION_NAME VARCHAR, " +
-    "DEFINITION_JSON VARCHAR, " +
-    "DEFINITION_SOURCE NUMBER, " +
-    "CREATED_TIME TIMESTAMP, " +
-    "UPDATED_TIME TIMESTAMP " +
-    "CONSTRAINT pk PRIMARY KEY (DEFINITION_NAME))"
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/ConnectionProvider.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/ConnectionProvider.scala
deleted file mode 100644
index cc02ed4..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/ConnectionProvider.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.db
-
-import java.sql.Connection
-import java.sql.SQLException
-
-/**
-  * Provides a connection to the anomaly store.
-  */
-trait ConnectionProvider {
-  @throws[SQLException]
-  def getConnection: Connection
-}
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/DefaultPhoenixDataSource.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/DefaultPhoenixDataSource.scala
deleted file mode 100644
index d9396de..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/DefaultPhoenixDataSource.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.db
-
-import org.apache.commons.logging.LogFactory
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.hbase.client.ConnectionFactory
-import org.apache.hadoop.hbase.client.HBaseAdmin
-import java.io.IOException
-import java.sql.Connection
-import java.sql.DriverManager
-import java.sql.SQLException
-
-object DefaultPhoenixDataSource {
-  private[db] val LOG = LogFactory.getLog(classOf[DefaultPhoenixDataSource])
-  private val ZOOKEEPER_CLIENT_PORT = "hbase.zookeeper.property.clientPort"
-  private val ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum"
-  private val ZNODE_PARENT = "zookeeper.znode.parent"
-  private val connectionUrl = "jdbc:phoenix:%s:%s:%s"
-}
-
-class DefaultPhoenixDataSource(var hbaseConf: Configuration) extends PhoenixConnectionProvider {
-
-  val zookeeperClientPort: String = hbaseConf.getTrimmed(DefaultPhoenixDataSource.ZOOKEEPER_CLIENT_PORT, "2181")
-  val zookeeperQuorum: String = hbaseConf.getTrimmed(DefaultPhoenixDataSource.ZOOKEEPER_QUORUM)
-  val znodeParent: String = hbaseConf.getTrimmed(DefaultPhoenixDataSource.ZNODE_PARENT, "/ams-hbase-unsecure")
-  final private var url : String = _
-
-  if (zookeeperQuorum == null || zookeeperQuorum.isEmpty) {
-    throw new IllegalStateException("Unable to find Zookeeper quorum to access HBase store using Phoenix.")
-  }
-  url = String.format(DefaultPhoenixDataSource.connectionUrl, zookeeperQuorum, zookeeperClientPort, znodeParent)
-
-
-  /**
-    * Get HBaseAdmin for table ops.
-    *
-    * @return @HBaseAdmin
-    * @throws IOException
-    */
-  @throws[IOException]
-  override def getHBaseAdmin: HBaseAdmin = ConnectionFactory.createConnection(hbaseConf).getAdmin.asInstanceOf[HBaseAdmin]
-
-  /**
-    * Get JDBC connection to HBase store. Assumption is that the hbase
-    * configuration is present on the classpath and loaded by the caller into
-    * the Configuration object.
-    * Phoenix already caches the HConnection between the client and HBase
-    * cluster.
-    *
-    * @return @java.sql.Connection
-    */
-  @throws[SQLException]
-  override def getConnection: Connection = {
-    DefaultPhoenixDataSource.LOG.debug("Metric store connection url: " + url)
-    try DriverManager.getConnection(url)
-    catch {
-      case e: SQLException =>
-        DefaultPhoenixDataSource.LOG.warn("Unable to connect to HBase store using Phoenix.", e)
-        throw e
-    }
-  }
-
-}
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/MetadataDatasource.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/MetadataDatasource.scala
deleted file mode 100644
index 7b223a2..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/MetadataDatasource.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.db
-
-trait MetadataDatasource {
-
-  type Key = Array[Byte]
-  type Value = Array[Byte]
-
-  /**
-    *  Idempotent call at the start of the application to initialize db
-    */
-  def initialize(): Unit
-
-  /**
-    * This function obtains the associated value to a key. It requires the (key-value) pair to be in the DataSource
-    *
-    * @param key
-    * @return the value associated with the passed key.
-    */
-  def apply(key: Key): Value = get(key).get
-
-  /**
-    * This function obtains the associated value to a key, if there exists one.
-    *
-    * @param key
-    * @return the value associated with the passed key.
-    */
-  def get(key: Key): Option[Value]
-
-  /**
-    * This function obtains all the values
-    *
-    * @return the list of values
-    */
-  def getAll: List[Value]
-
-  /**
-    * This function associates a key to a value, overwriting if necessary
-    */
-  def put(key: Key, value: Value): Unit
-
-  /**
-    * Delete key from the db
-    */
-  def delete(key: Key): Unit
-
-  /**
-    * This function updates the DataSource by deleting, updating and inserting new (key-value) pairs.
-    *
-    * @param toRemove which includes all the keys to be removed from the DataSource.
-    * @param toUpsert which includes all the (key-value) pairs to be inserted into the DataSource.
-    *                 If a key is already in the DataSource its value will be updated.
-    * @return the new DataSource after the removals and insertions were done.
-    */
-  def update(toRemove: Seq[Key], toUpsert: Seq[(Key, Value)]): Unit
-
-  /**
-    * This function closes the DataSource, without deleting the files used by it.
-    */
-  def close(): Unit
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/PhoenixAnomalyStoreAccessor.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/PhoenixAnomalyStoreAccessor.scala
deleted file mode 100644
index 53e6dee..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/PhoenixAnomalyStoreAccessor.scala
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.db
-
-import java.sql.{Connection, PreparedStatement, ResultSet, SQLException}
-import java.util.concurrent.TimeUnit.SECONDS
-
-import org.apache.ambari.metrics.adservice.app.AnomalyDetectionAppConfig
-import org.apache.ambari.metrics.adservice.configuration.HBaseConfiguration
-import org.apache.ambari.metrics.adservice.metadata.{MetricDefinitionService, MetricKey}
-import org.apache.ambari.metrics.adservice.model.AnomalyDetectionMethod.AnomalyDetectionMethod
-import org.apache.ambari.metrics.adservice.model.AnomalyType.AnomalyType
-import org.apache.ambari.metrics.adservice.model._
-import org.apache.hadoop.hbase.util.RetryCounterFactory
-import org.slf4j.{Logger, LoggerFactory}
-
-import com.google.inject.Inject
-
-/**
-  * Phoenix query handler class.
-  */
-class PhoenixAnomalyStoreAccessor extends AdAnomalyStoreAccessor {
-
-  @Inject
-  var configuration: AnomalyDetectionAppConfig = _
-
-  @Inject
-  var metricDefinitionService: MetricDefinitionService = _
-
-  var datasource: PhoenixConnectionProvider = _
-  val LOG : Logger = LoggerFactory.getLogger(classOf[PhoenixAnomalyStoreAccessor])
-
-  @Override
-  def initialize(): Unit = {
-
-    datasource = new DefaultPhoenixDataSource(HBaseConfiguration.getHBaseConf)
-    val retryCounterFactory = new RetryCounterFactory(10, SECONDS.toMillis(3).toInt)
-
-    val ttl = configuration.getAdServiceConfiguration.getAnomalyDataTtl
-    try {
-      var conn : Connection = getConnectionRetryingOnException(retryCounterFactory)
-      var stmt = conn.createStatement
-
-      //Create Method parameters table.
-      val methodParametersSql = String.format(PhoenixQueryConstants.CREATE_METHOD_PARAMETERS_TABLE,
-        PhoenixQueryConstants.METHOD_PARAMETERS_TABLE_NAME)
-      stmt.executeUpdate(methodParametersSql)
-
-      //Create Point in Time anomaly table
-      val pointInTimeAnomalySql = String.format(PhoenixQueryConstants.CREATE_PIT_ANOMALY_METRICS_TABLE_SQL,
-        PhoenixQueryConstants.PIT_ANOMALY_METRICS_TABLE_NAME,
-        ttl.asInstanceOf[Object])
-      stmt.executeUpdate(pointInTimeAnomalySql)
-
-      //Create Trend Anomaly table
-      val trendAnomalySql = String.format(PhoenixQueryConstants.CREATE_TREND_ANOMALY_METRICS_TABLE_SQL,
-        PhoenixQueryConstants.TREND_ANOMALY_METRICS_TABLE_NAME,
-        ttl.asInstanceOf[Object])
-      stmt.executeUpdate(trendAnomalySql)
-
-      //Create model snapshot table.
-      val snapshotSql = String.format(PhoenixQueryConstants.CREATE_MODEL_SNAPSHOT_TABLE,
-        PhoenixQueryConstants.MODEL_SNAPSHOT)
-      stmt.executeUpdate(snapshotSql)
-
-      conn.commit()
-    } catch {
-      case e: SQLException => throw e
-    }
-  }
-
-  @Override
-  def getMetricAnomalies(anomalyType: AnomalyType, startTime: Long, endTime: Long, limit: Int) : List[MetricAnomalyInstance] = {
-    val anomalies = scala.collection.mutable.MutableList.empty[MetricAnomalyInstance]
-    val conn : Connection = getConnection
-    var stmt : PreparedStatement = null
-    var rs : ResultSet = null
-    val s : Season = Season(Range(-1,-1), SeasonType.DAY)
-
-    try {
-      stmt = prepareAnomalyMetricsGetSqlStatement(conn, anomalyType, startTime, endTime, limit)
-      rs = stmt.executeQuery
-      if (anomalyType.equals(AnomalyType.POINT_IN_TIME)) {
-        while (rs.next()) {
-          val uuid: Array[Byte] = rs.getBytes("METRIC_UUID")
-          val timestamp: Long = rs.getLong("ANOMALY_TIMESTAMP")
-          val metricValue: Double = rs.getDouble("METRIC_VALUE")
-          val methodType: AnomalyDetectionMethod = AnomalyDetectionMethod.withName(rs.getString("METHOD_NAME"))
-          val season: Season = Season.fromJson(rs.getString("SEASONAL_INFO"))
-          val anomalyScore: Double = rs.getDouble("ANOMALY_SCORE")
-          val modelSnapshot: String = rs.getString("MODEL_PARAMETERS")
-
-          val metricKey: MetricKey = metricDefinitionService.getMetricKeyFromUuid(uuid)
-          val anomalyInstance: MetricAnomalyInstance = new PointInTimeAnomalyInstance(metricKey, timestamp,
-            metricValue, methodType, anomalyScore, season, modelSnapshot)
-          anomalies.+=(anomalyInstance)
-        }
-      } else {
-        while (rs.next()) {
-          val uuid: Array[Byte] = rs.getBytes("METRIC_UUID")
-          val anomalyStart: Long = rs.getLong("ANOMALY_PERIOD_START")
-          val anomalyEnd: Long = rs.getLong("ANOMALY_PERIOD_END")
-          val referenceStart: Long = rs.getLong("TEST_PERIOD_START")
-          val referenceEnd: Long = rs.getLong("TEST_PERIOD_END")
-          val methodType: AnomalyDetectionMethod = AnomalyDetectionMethod.withName(rs.getString("METHOD_NAME"))
-          val season: Season = Season.fromJson(rs.getString("SEASONAL_INFO"))
-          val anomalyScore: Double = rs.getDouble("ANOMALY_SCORE")
-          val modelSnapshot: String = rs.getString("MODEL_PARAMETERS")
-
-          val metricKey: MetricKey = metricDefinitionService.getMetricKeyFromUuid(uuid)
-          val anomalyInstance: MetricAnomalyInstance = TrendAnomalyInstance(metricKey,
-            TimeRange(anomalyStart, anomalyEnd),
-            TimeRange(referenceStart, referenceEnd),
-            methodType, anomalyScore, season, modelSnapshot)
-          anomalies.+=(anomalyInstance)
-        }
-      }
-    } catch {
-      case e: SQLException => throw e
-    }
-
-    anomalies.toList
-  }
-
-  @throws[SQLException]
-  private def prepareAnomalyMetricsGetSqlStatement(connection: Connection, anomalyType: AnomalyType, startTime: Long, endTime: Long, limit: Int): PreparedStatement = {
-
-    val sb = new StringBuilder
-
-    if (anomalyType.equals(AnomalyType.POINT_IN_TIME)) {
-      sb.++=(String.format(PhoenixQueryConstants.GET_PIT_ANOMALY_METRIC_SQL, PhoenixQueryConstants.PIT_ANOMALY_METRICS_TABLE_NAME))
-    } else {
-      sb.++=(String.format(PhoenixQueryConstants.GET_TREND_ANOMALY_METRIC_SQL, PhoenixQueryConstants.TREND_ANOMALY_METRICS_TABLE_NAME))
-    }
-
-    sb.append(" LIMIT " + limit)
-    var stmt: java.sql.PreparedStatement = null
-    try {
-      stmt = connection.prepareStatement(sb.toString)
-
-      var pos = 1
-      stmt.setLong(pos, startTime)
-
-      pos += 1
-      stmt.setLong(pos, endTime)
-
-      stmt.setFetchSize(limit)
-
-    } catch {
-      case e: SQLException =>
-        if (stmt != null)
-          return stmt
-        throw e
-    }
-    stmt
-  }
-
-  @throws[SQLException]
-  private def getConnection: Connection = datasource.getConnection
-
-  @throws[SQLException]
-  @throws[InterruptedException]
-  private def getConnectionRetryingOnException (retryCounterFactory : RetryCounterFactory) : Connection = {
-    val retryCounter = retryCounterFactory.create
-    while(true) {
-      try
-        return getConnection
-      catch {
-        case e: SQLException =>
-          if (!retryCounter.shouldRetry) {
-            LOG.error("HBaseAccessor getConnection failed after " + retryCounter.getMaxAttempts + " attempts")
-            throw e
-          }
-      }
-      retryCounter.sleepUntilNextRetry()
-    }
-    null
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/PhoenixConnectionProvider.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/PhoenixConnectionProvider.scala
deleted file mode 100644
index 1faf1ba..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/PhoenixConnectionProvider.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.db
-
-import org.apache.hadoop.hbase.client.HBaseAdmin
-import java.io.IOException
-
-/**
-  * Phoenix-specific connection provider that also exposes an HBaseAdmin
-  * handle for table operations.
-  */
-trait PhoenixConnectionProvider extends ConnectionProvider {
-  /**
-    * Get HBaseAdmin for the Phoenix connection
-    *
-    * @return
-    * @throws IOException
-    */
-    @throws[IOException]
-    def getHBaseAdmin: HBaseAdmin
-}
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/PhoenixQueryConstants.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/PhoenixQueryConstants.scala
deleted file mode 100644
index d9774e0..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/db/PhoenixQueryConstants.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.db
-
-object PhoenixQueryConstants {
-
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-  /* Table Name constants */
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-  val METRIC_PROFILE_TABLE_NAME = "METRIC_PROFILE"
-  val METHOD_PARAMETERS_TABLE_NAME = "METHOD_PARAMETERS"
-  val PIT_ANOMALY_METRICS_TABLE_NAME = "PIT_METRIC_ANOMALIES"
-  val TREND_ANOMALY_METRICS_TABLE_NAME = "TREND_METRIC_ANOMALIES"
-  val MODEL_SNAPSHOT = "MODEL_SNAPSHOT"
-
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-  /* CREATE statement constants */
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-  val CREATE_METHOD_PARAMETERS_TABLE: String = "CREATE TABLE IF NOT EXISTS %s (" +
-    "METHOD_NAME VARCHAR, " +
-    "METHOD_TYPE VARCHAR, " +
-    "PARAMETERS VARCHAR " +
-    "CONSTRAINT pk PRIMARY KEY (METHOD_NAME)) " +
-    "DATA_BLOCK_ENCODING='FAST_DIFF', IMMUTABLE_ROWS=true, COMPRESSION='SNAPPY'"
-
-  val CREATE_PIT_ANOMALY_METRICS_TABLE_SQL: String = "CREATE TABLE IF NOT EXISTS %s (" +
-    "METRIC_UUID BINARY(20) NOT NULL, " +
-    "METHOD_NAME VARCHAR, " +
-    "ANOMALY_TIMESTAMP UNSIGNED_LONG NOT NULL, " +
-    "METRIC_VALUE DOUBLE, " +
-    "SEASONAL_INFO VARCHAR, " +
-    "ANOMALY_SCORE DOUBLE, " +
-    "MODEL_PARAMETERS VARCHAR, " +
-    "DETECTION_TIME UNSIGNED_LONG " +
-    "CONSTRAINT pk PRIMARY KEY (METRIC_UUID, METHOD_NAME, ANOMALY_TIMESTAMP)) " +
-    "DATA_BLOCK_ENCODING='FAST_DIFF', IMMUTABLE_ROWS=true, TTL=%s, COMPRESSION='SNAPPY'"
-
-  val CREATE_TREND_ANOMALY_METRICS_TABLE_SQL: String = "CREATE TABLE IF NOT EXISTS %s (" +
-    "METRIC_UUID BINARY(20) NOT NULL, " +
-    "METHOD_NAME VARCHAR, " +
-    "ANOMALY_PERIOD_START UNSIGNED_LONG NOT NULL, " +
-    "ANOMALY_PERIOD_END UNSIGNED_LONG NOT NULL, " +
-    "TEST_PERIOD_START UNSIGNED_LONG NOT NULL, " +
-    "TEST_PERIOD_END UNSIGNED_LONG NOT NULL, " +
-    "SEASONAL_INFO VARCHAR, " +
-    "ANOMALY_SCORE DOUBLE, " +
-    "MODEL_PARAMETERS VARCHAR, " +
-    "DETECTION_TIME UNSIGNED_LONG " +
-    "CONSTRAINT pk PRIMARY KEY (METRIC_UUID, METHOD_NAME, ANOMALY_PERIOD_START, ANOMALY_PERIOD_END, TEST_PERIOD_START, TEST_PERIOD_END)) " +
-    "DATA_BLOCK_ENCODING='FAST_DIFF', IMMUTABLE_ROWS=true, TTL=%s, COMPRESSION='SNAPPY'"
-
-  val CREATE_MODEL_SNAPSHOT_TABLE: String = "CREATE TABLE IF NOT EXISTS %s (" +
-    "METRIC_UUID BINARY(20) NOT NULL, " +
-    "METHOD_NAME VARCHAR, " +
-    "METHOD_TYPE VARCHAR, " +
-    "PARAMETERS VARCHAR, " +
-    "SNAPSHOT_TIME UNSIGNED_LONG NOT NULL " +
-    "CONSTRAINT pk PRIMARY KEY (METRIC_UUID, METHOD_NAME, SNAPSHOT_TIME)) " +
-    "DATA_BLOCK_ENCODING='FAST_DIFF', IMMUTABLE_ROWS=true, COMPRESSION='SNAPPY'"
-
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-  /* UPSERT statement constants */
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-  val UPSERT_METHOD_PARAMETERS_SQL: String = "UPSERT INTO %s (METHOD_NAME, METHOD_TYPE, PARAMETERS) VALUES (?,?,?)"
-
-  val UPSERT_PIT_ANOMALY_METRICS_SQL: String = "UPSERT INTO %s (METRIC_UUID, ANOMALY_TIMESTAMP, METRIC_VALUE, METHOD_NAME, " +
-    "SEASONAL_INFO, ANOMALY_SCORE, MODEL_PARAMETERS, DETECTION_TIME) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"
-
-  val UPSERT_TREND_ANOMALY_METRICS_SQL: String = "UPSERT INTO %s (METRIC_UUID, ANOMALY_PERIOD_START, ANOMALY_PERIOD_END, " +
-    "TEST_PERIOD_START, TEST_PERIOD_END, METHOD_NAME, ANOMALY_SCORE, MODEL_PARAMETERS, DETECTION_TIME) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"
-
-  val UPSERT_MODEL_SNAPSHOT_SQL: String = "UPSERT INTO %s (METRIC_UUID, METHOD_NAME, METHOD_TYPE, PARAMETERS) VALUES (?, ?, ?, ?)"
-
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-  /* GET statement constants */
-  //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-  val GET_METHOD_PARAMETERS_SQL: String = "SELECT METHOD_NAME, METHOD_TYPE, PARAMETERS FROM %s WHERE METHOD_NAME = %s"
-
-  val GET_PIT_ANOMALY_METRIC_SQL: String = "SELECT METRIC_UUID, ANOMALY_TIMESTAMP, METRIC_VALUE, METHOD_NAME, SEASONAL_INFO, " +
-    "ANOMALY_SCORE, MODEL_PARAMETERS, DETECTION_TIME FROM %s WHERE ANOMALY_TIMESTAMP > ? AND ANOMALY_TIMESTAMP <= ? " +
-    "ORDER BY ANOMALY_SCORE DESC"
-
-  val GET_TREND_ANOMALY_METRIC_SQL: String = "SELECT METRIC_UUID, ANOMALY_PERIOD_START, ANOMALY_PERIOD_END, TEST_PERIOD_START, " +
-    "TEST_PERIOD_END, METHOD_NAME, SEASONAL_INFO, ANOMALY_SCORE, MODEL_PARAMETERS, DETECTION_TIME FROM %s WHERE ANOMALY_PERIOD_END > ? " +
-    "AND ANOMALY_PERIOD_END <= ? ORDER BY ANOMALY_SCORE DESC"
-
-  val GET_MODEL_SNAPSHOT_SQL: String = "SELECT METRIC_UUID, METHOD_NAME, METHOD_TYPE, PARAMETERS FROM %s WHERE UUID = %s AND METHOD_NAME = %s"
-
-}
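For reference, the %s placeholders in the CREATE templates above are filled in before execution. A minimal sketch, assuming a 30-day TTL (an illustrative value) and an existing Phoenix JDBC Statement named stmt:

    import org.apache.ambari.metrics.adservice.db.PhoenixQueryConstants._

    val pitTableTtlSeconds = "2592000" // 30 days, an assumed value
    val createPitTableSql: String =
      CREATE_PIT_ANOMALY_METRICS_TABLE_SQL.format(PIT_ANOMALY_METRICS_TABLE_NAME, pitTableTtlSeconds)
    // stmt.executeUpdate(createPitTableSql) would create PIT_METRIC_ANOMALIES via Phoenix.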
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/leveldb/LevelDBDatasource.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/leveldb/LevelDBDatasource.scala
deleted file mode 100644
index 49ef272..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/leveldb/LevelDBDatasource.scala
+++ /dev/null
@@ -1,128 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.leveldb
-
-import java.io.File
-
-import javax.inject.Inject
-
-import org.apache.ambari.metrics.adservice.app.AnomalyDetectionAppConfig
-import org.apache.ambari.metrics.adservice.configuration.MetricDefinitionDBConfiguration
-import org.apache.ambari.metrics.adservice.db.MetadataDatasource
-import org.iq80.leveldb.{DB, Options, WriteOptions}
-import org.iq80.leveldb.impl.Iq80DBFactory
-
-import com.google.inject.Singleton
-
-@Singleton
-class LevelDBDataSource() extends MetadataDatasource {
-
-  private var db: DB = _
-  @volatile var isInitialized: Boolean = false
-
-  var appConfig: AnomalyDetectionAppConfig = _
-
-  @Inject
-  def this(appConfig: AnomalyDetectionAppConfig) = {
-    this()
-    this.appConfig = appConfig
-  }
-
-  override def initialize(): Unit = {
-    if (isInitialized) return 
-
-    val configuration: MetricDefinitionDBConfiguration = appConfig.getMetricDefinitionDBConfiguration
-
-    db = createDB(new LevelDbConfig {
-      override val createIfMissing: Boolean = true
-      override val verifyChecksums: Boolean = configuration.getVerifyChecksums
-      override val paranoidChecks: Boolean = configuration.getPerformParanoidChecks
-      override val path: String = configuration.getDbDirPath
-    })
-    isInitialized = true
-  }
-
-  private def createDB(levelDbConfig: LevelDbConfig): DB = {
-    import levelDbConfig._
-
-    val options = new Options()
-      .createIfMissing(createIfMissing)
-      .paranoidChecks(paranoidChecks) // raise an error as soon as it detects an internal corruption
-      .verifyChecksums(verifyChecksums) // force checksum verification of all data that is read from the file system on behalf of a particular read
-
-    Iq80DBFactory.factory.open(new File(path), options)
-  }
-
-  override def close(): Unit = {
-    db.close()
-  }
-
-  /**
-    * This function obtains the value associated with a key, if one exists.
-    *
-    * @param key
-    * @return the value associated with the passed key.
-    */
-  override def get(key: Key): Option[Value] = Option(db.get(key))
-
-  /**
-    * This function obtains all the values
-    *
-    * @return the list of values
-    */
-  def getAll: List[Value] = {
-    val values = scala.collection.mutable.MutableList.empty[Value]
-    val iterator = db.iterator()
-    iterator.seekToFirst()
-    while (iterator.hasNext) {
-      val entry: java.util.Map.Entry[Key, Value] = iterator.next()
-      values.+=(entry.getValue)
-    }
-    values.toList
-  }
-
-  /**
-    * This function updates the DataSource by deleting, updating and inserting new (key-value) pairs.
-    *
-    * @param toRemove which includes all the keys to be removed from the DataSource.
-    * @param toUpsert which includes all the (key-value) pairs to be inserted into the DataSource.
-    *                 If a key is already in the DataSource its value will be updated.
-    */
-  override def update(toRemove: Seq[Key], toUpsert: Seq[(Key, Value)]): Unit = {
-    val batch = db.createWriteBatch()
-    toRemove.foreach { key => batch.delete(key) }
-    toUpsert.foreach { item => batch.put(item._1, item._2) }
-    db.write(batch, new WriteOptions())
-  }
-
-  override def put(key: Key, value: Value): Unit = {
-    db.put(key, value)
-  }
-
-  override def delete(key: Key): Unit = {
-    db.delete(key)
-  }
-}
-
-trait LevelDbConfig {
-  val createIfMissing: Boolean
-  val paranoidChecks: Boolean
-  val verifyChecksums: Boolean
-  val path: String
-}
\ No newline at end of file
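A minimal usage sketch of LevelDBDataSource above, assuming the MetadataDatasource Key and Value aliases are byte arrays (an assumption; that trait is defined elsewhere) and that appConfig is an AnomalyDetectionAppConfig instance:

    val ds = new LevelDBDataSource(appConfig)
    ds.initialize()
    ds.put("definition:host-memory".getBytes("UTF-8"), "{ ... }".getBytes("UTF-8"))
    val stored: Option[Array[Byte]] = ds.get("definition:host-memory".getBytes("UTF-8"))
    ds.close()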
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/ADMetadataProvider.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/ADMetadataProvider.scala
deleted file mode 100644
index c277221..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/ADMetadataProvider.scala
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import javax.ws.rs.core.Response
-
-import org.apache.ambari.metrics.adservice.configuration.MetricCollectorConfiguration
-import org.apache.commons.lang.StringUtils
-import org.slf4j.{Logger, LoggerFactory}
-
-import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
-
-import scalaj.http.{Http, HttpRequest, HttpResponse}
-
-/**
-  * Class to invoke Metrics Collector metadata API.
-  * TODO : Instantiate a sync thread that regularly updates the internal maps by reading off AMS metadata.
-  */
-class ADMetadataProvider extends MetricMetadataProvider {
-
-  var metricCollectorHosts: Array[String] = Array.empty[String]
-  var metricCollectorPort: String = _
-  var metricCollectorProtocol: String = _
-  var metricMetadataPath: String = "/v1/timeline/metrics/metadata/keys"
-  val LOG : Logger = LoggerFactory.getLogger(classOf[ADMetadataProvider])
-
-  val connectTimeout: Int = 10000
-  val readTimeout: Int = 10000
-  //TODO: Add retries for metrics collector GET call.
-  //val retries: Long = 5
-
-  def this(configuration: MetricCollectorConfiguration) {
-    this()
-    if (StringUtils.isNotEmpty(configuration.getHosts)) {
-      metricCollectorHosts = configuration.getHosts.split(",")
-    }
-    metricCollectorPort = configuration.getPort
-    metricCollectorProtocol = configuration.getProtocol
-    metricMetadataPath = configuration.getMetadataEndpoint
-  }
-
-  override def getMetricKeysForDefinitions(metricSourceDefinition: MetricSourceDefinition): Set[MetricKey] = {
-
-    val numDefinitions: Int = metricSourceDefinition.metricDefinitions.size
-    val metricKeySet: scala.collection.mutable.Set[MetricKey] = scala.collection.mutable.Set.empty[MetricKey]
-
-    for (metricDef <- metricSourceDefinition.metricDefinitions) {
-      if (metricDef.isValid) { //Skip requesting metric keys for invalid definitions.
-        for (host <- metricCollectorHosts) {
-          val metricKeys: Set[MetricKey] = getKeysFromMetricsCollector(metricCollectorProtocol, host, metricCollectorPort, metricMetadataPath, metricDef)
-          if (metricKeys != null) {
-            metricKeySet.++=(metricKeys)
-          }
-        }
-      }
-    }
-    metricKeySet.toSet
-  }
-
-  /**
-    *
-    * @param protocol
-    * @param host
-    * @param port
-    * @param path
-    * @param metricDefinition
-    * @return
-    */
-  def getKeysFromMetricsCollector(protocol: String, host: String, port: String, path: String, metricDefinition: MetricDefinition): Set[MetricKey] = {
-
-    val url: String = protocol + "://" + host + ":" + port + path
-    val mapper = new ObjectMapper() with ScalaObjectMapper
-
-    if (metricDefinition.hosts == null || metricDefinition.hosts.isEmpty) {
-      val request: HttpRequest = Http(url)
-        .param("metricName", metricDefinition.metricName)
-        .param("appId", metricDefinition.appId)
-      makeHttpGetCall(request, mapper)
-    } else {
-      val metricKeySet: scala.collection.mutable.Set[MetricKey] = scala.collection.mutable.Set.empty[MetricKey]
-
-      for (h <- metricDefinition.hosts) {
-        val request: HttpRequest = Http(url)
-          .param("metricName", metricDefinition.metricName)
-          .param("appId", metricDefinition.appId)
-          .param("hostname", h)
-
-        val metricKeys = makeHttpGetCall(request, mapper)
-        metricKeySet.++=(metricKeys)
-      }
-      metricKeySet.toSet
-    }
-  }
-
-  private def makeHttpGetCall(request: HttpRequest, mapper: ObjectMapper): Set[MetricKey] = {
-
-    try {
-      val result: HttpResponse[String] = request.asString
-      if (result.code == Response.Status.OK.getStatusCode) {
-        LOG.info("Successfully fetched metric keys from metrics collector")
-        val metricKeySet: java.util.Set[java.util.Map[String, String]] = mapper.readValue(result.body,
-          classOf[java.util.Set[java.util.Map[String, String]]])
-        return getMetricKeys(metricKeySet)
-      } else {
-        LOG.error("Got an error when trying to fetch metric key from metrics collector. Code = " + result.code + ", Message = " + result.body)
-      }
-    } catch {
-      case _: java.io.IOException | _: java.net.SocketTimeoutException => LOG.error("Unable to fetch metric keys from Metrics collector for : " + request.toString)
-    }
-    Set.empty[MetricKey]
-  }
-
-
-  def getMetricKeys(timelineMetricKeys: java.util.Set[java.util.Map[String, String]]): Set[MetricKey] = {
-    val metricKeySet: scala.collection.mutable.Set[MetricKey] = scala.collection.mutable.Set.empty[MetricKey]
-    val iter = timelineMetricKeys.iterator()
-    while (iter.hasNext) {
-      val timelineMetricKey: java.util.Map[String, String] = iter.next()
-      val metricKey: MetricKey = MetricKey(
-        timelineMetricKey.get("metricName"),
-        timelineMetricKey.get("appId"),
-        timelineMetricKey.get("instanceId"),
-        timelineMetricKey.get("hostname"),
-        timelineMetricKey.get("uuid").getBytes())
-
-      metricKeySet.add(metricKey)
-    }
-    metricKeySet.toSet
-  }
-
-}
\ No newline at end of file
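The metadata lookup above boils down to a scalaj-http GET against the collector's metadata endpoint. A sketch of the request shape, with an illustrative host and port and the default path from the class above:

    import scalaj.http.{Http, HttpResponse}

    val request = Http("http://ams-collector.example.com:6188/v1/timeline/metrics/metadata/keys")
      .param("metricName", "mem_free")
      .param("appId", "HOST")
      .timeout(connTimeoutMs = 10000, readTimeoutMs = 10000)
    val response: HttpResponse[String] = request.asString
    // response.body carries the JSON set of {metricName, appId, instanceId, hostname, uuid} maps
    // that getMetricKeys(...) above converts into MetricKey instances.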
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/InputMetricDefinitionParser.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/InputMetricDefinitionParser.scala
deleted file mode 100644
index 3c8ea84..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/InputMetricDefinitionParser.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import java.io.File
-
-import org.apache.ambari.metrics.adservice.app.ADServiceScalaModule
-
-import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
-
-object InputMetricDefinitionParser {
-
-  def parseInputDefinitionsFromDirectory(directory: String): List[MetricSourceDefinition] = {
-
-    if (directory == null) {
-      return List.empty[MetricSourceDefinition]
-    }
-    val mapper = new ObjectMapper() with ScalaObjectMapper
-    mapper.registerModule(new ADServiceScalaModule)
-    val metricSourceDefinitions: scala.collection.mutable.MutableList[MetricSourceDefinition] =
-      scala.collection.mutable.MutableList.empty[MetricSourceDefinition]
-
-    for (file <- getFilesInDirectory(directory)) {
-      val source = scala.io.Source.fromFile(file)
-      val lines = try source.mkString finally source.close()
-      val definition: MetricSourceDefinition = mapper.readValue[MetricSourceDefinition](lines)
-      if (definition != null) {
-        metricSourceDefinitions.+=(definition)
-      }
-    }
-    metricSourceDefinitions.toList
-  }
-
-  private def getFilesInDirectory(directory: String): List[File] = {
-    val dir = new File(directory)
-    if (dir.exists && dir.isDirectory) {
-      dir.listFiles.filter(_.isFile).toList
-    } else {
-      List[File]()
-    }
-  }
-}
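A minimal usage sketch of the parser above; the directory path is illustrative, not a fixed Ambari location:

    val definitions: List[MetricSourceDefinition] =
      InputMetricDefinitionParser.parseInputDefinitionsFromDirectory("/etc/ambari-metrics-anomaly-detection/conf/input-definitions")
    definitions.foreach(d => println(d.definitionName))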
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinition.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinition.scala
deleted file mode 100644
index c668dfa..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinition.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import org.apache.commons.lang3.StringUtils
-
-import com.fasterxml.jackson.annotation.JsonIgnore
-/*
-   {
-     "metric-name": "mem_free",
-     "appId" : "HOST",
-     "hosts" : ["h1","h2"],
-     "metric-description" : "Free memory on a Host.",
-     "troubleshooting-info" : "Sudden drop / hike in free memory on a host.",
-     "static-threshold" : 10,
-     "app-id" : "HOST"
-   }
- */
-
-@SerialVersionUID(1002L)
-class MetricDefinition extends Serializable {
-
-  var metricName: String = _
-  var appId: String = _
-  var hosts: List[String] = List.empty[String]
-  var metricDescription: String = ""
-  var troubleshootingInfo: String = ""
-  var staticThreshold: Double = _
-
-  //A Metric definition is valid if we can resolve a metricName and appId (defined or inherited) at runtime.
-  private var valid : Boolean = true
-
-  def this(metricName: String,
-           appId: String,
-           hosts: List[String],
-           metricDescription: String,
-           troubleshootingInfo: String,
-           staticThreshold: Double) = {
-    this()
-    this.metricName = metricName
-    this.appId = appId
-    this.hosts = hosts
-    this.metricDescription = metricDescription
-    this.troubleshootingInfo = troubleshootingInfo
-    this.staticThreshold = staticThreshold
-  }
-
-  @Override
-  override def equals(obj: scala.Any): Boolean = {
-
-    if (obj == null || (getClass ne obj.getClass))
-      return false
-
-    val that = obj.asInstanceOf[MetricDefinition]
-
-    if (!(metricName == that.metricName))
-      return false
-
-    if (StringUtils.isNotEmpty(appId)) {
-      appId == that.appId
-    }
-    else {
-      StringUtils.isEmpty(that.appId)
-    }
-  }
-
-  def isValid: Boolean = {
-    valid
-  }
-
-  def makeInvalid() : Unit = {
-    valid = false
-  }
-}
-
-object MetricDefinition {
-
-  def apply(metricName: String,
-            appId: String,
-            hosts: List[String],
-            metricDescription: String,
-            troubleshootingInfo: String,
-            staticThreshold: Double): MetricDefinition =
-    new MetricDefinition(metricName, appId, hosts, metricDescription, troubleshootingInfo, staticThreshold)
-
-  def apply(metricName: String, appId: String, hosts: List[String]): MetricDefinition =
-    new MetricDefinition(metricName, appId, hosts, null, null, -1)
-
-}
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinitionService.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinitionService.scala
deleted file mode 100644
index 52ce39e..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinitionService.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import org.apache.ambari.metrics.adservice.service.AbstractADService
-
-trait MetricDefinitionService extends AbstractADService{
-
-  /**
-    * Given a 'UUID', return the metric key associated with it.
-    * @param uuid UUID
-    * @return
-    */
-  def getMetricKeyFromUuid(uuid: Array[Byte]) : MetricKey
-
-  /**
-    * Return all the definitions being tracked.
-    * @return List of all tracked Metric Source Definitions.
-    */
-  def getDefinitions: List[MetricSourceDefinition]
-
-  /**
-    * Given a component definition name, return the definition associated with it.
-    * @param name component definition name
-    * @return
-    */
-  def getDefinitionByName(name: String) : MetricSourceDefinition
-
-  /**
-    * Add a new definition.
-    * @param definition component definition JSON
-    * @return
-    */
-  def addDefinition(definition: MetricSourceDefinition) : Boolean
-
-  /**
-    * Update a component definition by name. Only definitions which were added by API can be modified through API.
-    * @param definition updated component definition
-    * @return
-    */
-  def updateDefinition(definition: MetricSourceDefinition) : Boolean
-
-  /**
-    * Delete a component definition by name. Only definitions which were added by API can be deleted through API.
-    * @param name component definition name
-    * @return
-    */
-  def deleteDefinitionByName(name: String) : Boolean
-
-  /**
-    * Given an appId, return set of definitions that are tracked for that appId.
-    * @param appId component definition appId
-    * @return
-    */
-  def getDefinitionByAppId(appId: String) : List[MetricSourceDefinition]
-
-  /**
-    * Return the mapping from definition name to the set of metric keys.
-    * @return Map of Metric Source Definition to set of metric keys associated with it.
-    */
-  def getMetricKeys:  Map[String, Set[MetricKey]]
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinitionServiceImpl.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinitionServiceImpl.scala
deleted file mode 100644
index b9b4a7c..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinitionServiceImpl.scala
+++ /dev/null
@@ -1,242 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import org.apache.ambari.metrics.adservice.app.AnomalyDetectionAppConfig
-import org.apache.ambari.metrics.adservice.db.AdMetadataStoreAccessor
-import org.slf4j.{Logger, LoggerFactory}
-
-import com.google.inject.{Inject, Singleton}
-
-@Singleton
-class MetricDefinitionServiceImpl extends MetricDefinitionService {
-
-  val LOG : Logger = LoggerFactory.getLogger(classOf[MetricDefinitionServiceImpl])
-
-  var adMetadataStoreAccessor: AdMetadataStoreAccessor = _
-  var configuration: AnomalyDetectionAppConfig = _
-  var metricMetadataProvider: MetricMetadataProvider = _
-
-  val metricSourceDefinitionMap: scala.collection.mutable.Map[String, MetricSourceDefinition] = scala.collection.mutable.Map()
-  val metricDefinitionMetricKeyMap: scala.collection.mutable.Map[MetricSourceDefinition, Set[MetricKey]] = scala.collection.mutable.Map()
-  val metricKeys: scala.collection.mutable.Set[MetricKey] = scala.collection.mutable.Set.empty[MetricKey]
-
-  @Inject
-  def this (anomalyDetectionAppConfig: AnomalyDetectionAppConfig, metadataStoreAccessor: AdMetadataStoreAccessor) = {
-    this ()
-    adMetadataStoreAccessor = metadataStoreAccessor
-    configuration = anomalyDetectionAppConfig
-  }
-
-  @Override
-  def initialize() : Unit = {
-    LOG.info("Initializing Metric Definition Service...")
-
-    //Initialize Metric Metadata Provider
-    metricMetadataProvider = new ADMetadataProvider(configuration.getMetricCollectorConfiguration)
-
-    //Load definitions from metadata store
-    val definitionsFromStore: List[MetricSourceDefinition] = adMetadataStoreAccessor.getSavedInputDefinitions
-    for (definition <- definitionsFromStore) {
-      sanitizeMetricSourceDefinition(definition)
-    }
-
-    //Load definitions from configs
-    val definitionsFromConfig: List[MetricSourceDefinition] = getInputDefinitionsFromConfig
-    for (definition <- definitionsFromConfig) {
-      sanitizeMetricSourceDefinition(definition)
-    }
-
-    //Union the 2 sources, with DB taking precedence.
-    //Save new definition list to DB.
-    metricSourceDefinitionMap.++=(combineDefinitionSources(definitionsFromConfig, definitionsFromStore))
-
-    //Reach out to AMS Metadata and get Metric Keys. Pass in MSD and get back Set<MK>
-    for (definition <- metricSourceDefinitionMap.values) {
-      val keys: Set[MetricKey] = metricMetadataProvider.getMetricKeysForDefinitions(definition)
-      metricDefinitionMetricKeyMap(definition) = keys
-      metricKeys.++=(keys)
-    }
-
-    LOG.info("Successfully initialized Metric Definition Service.")
-  }
-
-  def getMetricKeyFromUuid(uuid: Array[Byte]): MetricKey = {
-    var key: MetricKey = null
-    for (metricKey <- metricKeys) {
-      if (metricKey.uuid.sameElements(uuid)) {
-        key = metricKey
-      }
-    }
-    key
-  }
-
-  @Override
-  def getDefinitions: List[MetricSourceDefinition] = {
-    metricSourceDefinitionMap.values.toList
-  }
-
-  @Override
-  def getDefinitionByName(name: String): MetricSourceDefinition = {
-    if (!metricSourceDefinitionMap.contains(name)) {
-      LOG.warn("Metric Source Definition with name " + name + " not found")
-      null
-    } else {
-      metricSourceDefinitionMap.apply(name)
-    }
-  }
-
-  @Override
-  def addDefinition(definition: MetricSourceDefinition): Boolean = {
-    if (metricSourceDefinitionMap.contains(definition.definitionName)) {
-      LOG.info("Definition with name " + definition.definitionName + " already present.")
-      return false
-    }
-    definition.definitionSource = MetricSourceDefinitionType.API
-
-    val success: Boolean = adMetadataStoreAccessor.saveInputDefinition(definition)
-    if (success) {
-      metricSourceDefinitionMap += definition.definitionName -> definition
-      val keys: Set[MetricKey] = metricMetadataProvider.getMetricKeysForDefinitions(definition)
-      metricDefinitionMetricKeyMap(definition) = keys
-      metricKeys.++=(keys)
-      LOG.info("Successfully created metric source definition : " + definition.definitionName)
-    }
-    success
-  }
-
-  @Override
-  def updateDefinition(definition: MetricSourceDefinition): Boolean = {
-    if (!metricSourceDefinitionMap.contains(definition.definitionName)) {
-      LOG.warn("Metric Source Definition with name " + definition.definitionName + " not found")
-      return false
-    }
-
-    if (metricSourceDefinitionMap.apply(definition.definitionName).definitionSource != MetricSourceDefinitionType.API) {
-      return false
-    }
-    definition.definitionSource = MetricSourceDefinitionType.API
-
-    val success: Boolean = adMetadataStoreAccessor.saveInputDefinition(definition)
-    if (success) {
-      metricSourceDefinitionMap += definition.definitionName -> definition
-      val keys: Set[MetricKey] = metricMetadataProvider.getMetricKeysForDefinitions(definition)
-      metricDefinitionMetricKeyMap(definition) = keys
-      metricKeys.++=(keys)
-      LOG.info("Successfully updated metric source definition : " + definition.definitionName)
-    }
-    success
-  }
-
-  @Override
-  def deleteDefinitionByName(name: String): Boolean = {
-    if (!metricSourceDefinitionMap.contains(name)) {
-      LOG.warn("Metric Source Definition with name " + name + " not found")
-      return false
-    }
-
-    val definition : MetricSourceDefinition = metricSourceDefinitionMap.apply(name)
-    if (definition.definitionSource != MetricSourceDefinitionType.API) {
-      LOG.warn("Cannot delete metric source definition which was not created through API.")
-      return false
-    }
-
-    val success: Boolean = adMetadataStoreAccessor.removeInputDefinition(name)
-    if (success) {
-      metricSourceDefinitionMap -= definition.definitionName
-      metricKeys.--=(metricDefinitionMetricKeyMap.apply(definition))
-      metricDefinitionMetricKeyMap -= definition
-      LOG.info("Successfully deleted metric source definition : " + name)
-    }
-    success
-  }
-
-  @Override
-  def getDefinitionByAppId(appId: String): List[MetricSourceDefinition] = {
-
-    val defList : List[MetricSourceDefinition] = metricSourceDefinitionMap.values.toList
-    defList.filter(_.appId == appId)
-  }
-
-  def combineDefinitionSources(configDefinitions: List[MetricSourceDefinition], dbDefinitions: List[MetricSourceDefinition])
-  : Map[String, MetricSourceDefinition] = {
-
-    var combinedDefinitionMap: scala.collection.mutable.Map[String, MetricSourceDefinition] =
-      scala.collection.mutable.Map.empty[String, MetricSourceDefinition]
-
-    for (definitionFromDb <- dbDefinitions) {
-      combinedDefinitionMap(definitionFromDb.definitionName) = definitionFromDb
-    }
-
-    for (definition <- configDefinitions) {
-      if (!dbDefinitions.contains(definition)) {
-        adMetadataStoreAccessor.saveInputDefinition(definition)
-        combinedDefinitionMap(definition.definitionName) = definition
-      }
-    }
-    combinedDefinitionMap.toMap
-  }
-
-  def getInputDefinitionsFromConfig: List[MetricSourceDefinition] = {
-    val configDirectory = configuration.getMetricDefinitionServiceConfiguration.getInputDefinitionDirectory
-    InputMetricDefinitionParser.parseInputDefinitionsFromDirectory(configDirectory)
-  }
-
-  def setAdMetadataStoreAccessor (adMetadataStoreAccessor: AdMetadataStoreAccessor) : Unit = {
-    this.adMetadataStoreAccessor = adMetadataStoreAccessor
-  }
-
-  /**
-    * Look into the Metric Definitions inside a Metric Source definition, and push down source level appId &
-    * hosts to Metric definition if they do not have an override.
-    * @param metricSourceDefinition Input Metric Source Definition
-    */
-  def sanitizeMetricSourceDefinition(metricSourceDefinition: MetricSourceDefinition): Unit = {
-    val sourceLevelAppId: String = metricSourceDefinition.appId
-    val sourceLevelHostList: List[String] = metricSourceDefinition.hosts
-
-    for (metricDef <- metricSourceDefinition.metricDefinitions.toList) {
-      if (metricDef.appId == null) {
-        if (sourceLevelAppId == null || sourceLevelAppId.isEmpty) {
-          metricDef.makeInvalid()
-        } else {
-          metricDef.appId = sourceLevelAppId
-        }
-      }
-
-      if (metricDef.isValid && (metricDef.hosts == null || metricDef.hosts.isEmpty)) {
-        if (sourceLevelHostList != null && sourceLevelHostList.nonEmpty) {
-          metricDef.hosts = sourceLevelHostList
-        }
-      }
-    }
-  }
-
-  /**
-    * Return the mapping from definition name to the set of metric keys.
-    *
-    * @return Map of Metric Source Definition to set of metric keys associated with it.
-    */
-  override def getMetricKeys: Map[String, Set[MetricKey]] = {
-    val metricKeyMap: scala.collection.mutable.Map[String, Set[MetricKey]] = scala.collection.mutable.Map()
-    for (definition <- metricSourceDefinitionMap.values) {
-      metricKeyMap(definition.definitionName) = metricDefinitionMetricKeyMap.apply(definition)
-    }
-    metricKeyMap.toMap
-  }
-}
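To illustrate the precedence rule in combineDefinitionSources above (a DB definition wins over a config definition with the same name, because MetricSourceDefinition equality is by definitionName), a small sketch with illustrative definition names:

    val fromDb = List(new MetricSourceDefinition("host-memory", "HOST", MetricSourceDefinitionType.API))
    val fromConfig = List(
      new MetricSourceDefinition("host-memory", "HOST", MetricSourceDefinitionType.CONFIG),
      new MetricSourceDefinition("host-cpu", "HOST", MetricSourceDefinitionType.CONFIG))
    // combineDefinitionSources(fromConfig, fromDb) keeps the DB copy of "host-memory"
    // and persists and registers the config-only "host-cpu" definition.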
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricKey.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricKey.scala
deleted file mode 100644
index 65c496e..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricKey.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import javax.xml.bind.annotation.XmlRootElement
-
-@XmlRootElement
-case class MetricKey (metricName: String, appId: String, instanceId: String, hostname: String, uuid: Array[Byte]) {
-
-  @Override
-  override def toString: String = {
-  "MetricName=" + metricName + ",App=" + appId + ",InstanceId=" + instanceId + ",Host=" + hostname
-  }
-
-  @Override
-  override def equals(obj: scala.Any): Boolean = {
-
-    if (obj == null || (getClass ne obj.getClass))
-      return false
-
-    val that = obj.asInstanceOf[MetricKey]
-
-    if (!(metricName == that.metricName))
-      return false
-
-    if (!(appId == that.appId))
-      return false
-
-    if (!(instanceId == that.instanceId))
-      return false
-
-    if (!(hostname == that.hostname))
-      return false
-
-    true
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricMetadataProvider.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricMetadataProvider.scala
deleted file mode 100644
index b5ba15e..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricMetadataProvider.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-/**
-  * Metadata provider for maintaining the metric information in the Metric Definition Service.
-  */
-trait MetricMetadataProvider {
-
-  /**
-    * Return the set of Metric Keys for a given component definition.
-    * @param metricSourceDefinition component definition
-    * @return
-    */
-  def getMetricKeysForDefinitions(metricSourceDefinition: MetricSourceDefinition): Set[MetricKey]
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricSourceDefinition.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricSourceDefinition.scala
deleted file mode 100644
index 47b1499..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricSourceDefinition.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import javax.xml.bind.annotation.XmlRootElement
-
-import org.apache.ambari.metrics.adservice.metadata.MetricSourceDefinitionType.MetricSourceDefinitionType
-import org.apache.ambari.metrics.adservice.model.AnomalyType.AnomalyType
-
-/*
-{
-  "definition-name": "host-memory",
-  "app-id" : "HOST",
-  "hosts" : ["c6401.ambari.apache.org"],
-  "metric-definitions" : [
-    {
-      "metric-name": "mem_free",
-      "metric-description" : "Free memory on a Host.",
-      "troubleshooting-info" : "Sudden drop / hike in free memory on a host.",
-      "static-threshold" : 10,
-      "app-id" : "HOST"
-    }
-  ],
-  "related-definition-names" : ["host-cpu", "host-network"],
-  "anomaly-detection-subsystems" : ["point-in-time", "trend"]
-}
-*/
-
-
-@SerialVersionUID(10001L)
-@XmlRootElement
-class MetricSourceDefinition extends Serializable{
-
-  var definitionName: String = _
-  var appId: String = _
-  var definitionSource: MetricSourceDefinitionType = MetricSourceDefinitionType.CONFIG
-  var hosts: List[String] = List.empty[String]
-  var relatedDefinitions: List[String] = List.empty[String]
-  var associatedAnomalySubsystems: List[AnomalyType] = List.empty[AnomalyType]
-
-  var metricDefinitions: scala.collection.mutable.MutableList[MetricDefinition] =
-    scala.collection.mutable.MutableList.empty[MetricDefinition]
-
-  def this(definitionName: String, appId: String, source: MetricSourceDefinitionType) = {
-    this()
-    this.definitionName = definitionName
-    this.appId = appId
-    this.definitionSource = source
-  }
-
-  def addMetricDefinition(metricDefinition: MetricDefinition): Unit = {
-    if (!metricDefinitions.contains(metricDefinition)) {
-      metricDefinitions.+=(metricDefinition)
-    }
-  }
-
-  def removeMetricDefinition(metricDefinition: MetricDefinition): Unit = {
-    metricDefinitions = metricDefinitions.filter(_ != metricDefinition)
-  }
-
-  @Override
-  override def equals(obj: scala.Any): Boolean = {
-
-    if (obj == null) {
-      return false
-    }
-    val that = obj.asInstanceOf[MetricSourceDefinition]
-    definitionName.equals(that.definitionName)
-  }
-}
\ No newline at end of file
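A minimal construction sketch using the classes above; the definition name and host are illustrative:

    val sourceDef = new MetricSourceDefinition("host-memory", "HOST", MetricSourceDefinitionType.CONFIG)
    sourceDef.hosts = List("c6401.ambari.apache.org")
    sourceDef.addMetricDefinition(MetricDefinition("mem_free", "HOST", List.empty[String]))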
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricSourceDefinitionType.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricSourceDefinitionType.scala
deleted file mode 100644
index 04ff95b..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/metadata/MetricSourceDefinitionType.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import javax.xml.bind.annotation.XmlRootElement
-
-@XmlRootElement
-object MetricSourceDefinitionType extends Enumeration{
-  type MetricSourceDefinitionType = Value
-  val CONFIG,API = Value
-}
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/AnomalyDetectionMethod.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/AnomalyDetectionMethod.scala
deleted file mode 100644
index 81a7023..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/AnomalyDetectionMethod.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.model
-
-object AnomalyDetectionMethod extends Enumeration {
-  type AnomalyDetectionMethod = Value
-  val EMA, TUKEYS, KS, HSDEV, UNKOWN = Value
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/AnomalyType.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/AnomalyType.scala
deleted file mode 100644
index 817180e..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/AnomalyType.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.model
-
-import javax.xml.bind.annotation.XmlRootElement
-
-@XmlRootElement
-object AnomalyType extends Enumeration {
-  type AnomalyType = Value
-   val POINT_IN_TIME, TREND, UNKNOWN = Value
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/MetricAnomalyInstance.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/MetricAnomalyInstance.scala
deleted file mode 100644
index 248a380..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/MetricAnomalyInstance.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.model
-
-import javax.xml.bind.annotation.XmlRootElement
-
-import org.apache.ambari.metrics.adservice.metadata.MetricKey
-import org.apache.ambari.metrics.adservice.model.AnomalyType.AnomalyType
-
-@XmlRootElement
-abstract class MetricAnomalyInstance {
-
-  val metricKey: MetricKey
-  val anomalyType: AnomalyType
-
-}
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/PointInTimeAnomalyInstance.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/PointInTimeAnomalyInstance.scala
deleted file mode 100644
index 470cc2c..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/PointInTimeAnomalyInstance.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.model
-
-import java.util.Date
-
-import org.apache.ambari.metrics.adservice.metadata.MetricKey
-import org.apache.ambari.metrics.adservice.model.AnomalyDetectionMethod.AnomalyDetectionMethod
-import org.apache.ambari.metrics.adservice.model.AnomalyType.AnomalyType
-
-class PointInTimeAnomalyInstance(val metricKey: MetricKey,
-                                 val timestamp: Long,
-                                 val metricValue: Double,
-                                 val methodType: AnomalyDetectionMethod,
-                                 val anomalyScore: Double,
-                                 val anomalousSeason: Season,
-                                 val modelParameters: String) extends MetricAnomalyInstance {
-
-  override val anomalyType: AnomalyType = AnomalyType.POINT_IN_TIME
-
-  private def anomalyToString : String = {
-      "Method=" + methodType + ", AnomalyScore=" + anomalyScore + ", Season=" + anomalousSeason.toString +
-        ", Model Parameters=" + modelParameters
-  }
-
-  @Override
-  override def toString: String = {
-    "Metric : [" + metricKey.toString + ", Metric Value=" + metricValue + " @ Time = " + new Date(timestamp) +  "], Anomaly : [" + anomalyToString + "]"
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/Range.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/Range.scala
deleted file mode 100644
index 4ad35e7..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/Range.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.model
-
-/**
-  * Class to capture a Range in a Season.
-  * For example Monday - Wednesday is a 'Range' in a DAY Season.
-  * @param lower lower end
-  * @param higher higher end
-  */
-case class Range (lower: Int, higher: Int) {
-
-  def withinRange(value: Int) : Boolean = {
-    if (lower <= higher) {
-      (value >= lower) && (value <= higher)
-    } else {
-      (value >= lower) || (value <= higher)
-    }
-  }
-
-  @Override
-  override def equals(obj: scala.Any): Boolean = {
-    if (obj == null) {
-      return false
-    }
-    val that : Range = obj.asInstanceOf[Range]
-    (lower == that.lower) && (higher == that.higher)
-  }
-}
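A quick illustration of withinRange using the adservice Range above (not scala.Range), with Calendar day-of-week numbering (Sunday = 1 .. Saturday = 7); the values are illustrative:

    val weekdays = Range(2, 6)      // Monday .. Friday
    weekdays.withinRange(4)         // true: Wednesday
    val friToMon = Range(6, 2)      // wrap-around: Friday .. Monday
    friToMon.withinRange(1)         // true: Sunday
    friToMon.withinRange(4)         // false: Wednesday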
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/Season.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/Season.scala
deleted file mode 100644
index 84784bc..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/Season.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.model
-
-import java.time.DayOfWeek
-import java.util.Calendar
-
-import javax.xml.bind.annotation.XmlRootElement
-
-import org.apache.ambari.metrics.adservice.model.SeasonType.SeasonType
-
-import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
-import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
-
-/**
-  * Class to capture a 'Season' for a metric anomaly.
-  * A Season is a combination of DAY Range and HOUR Range.
-  * @param DAY Day Range
-  * @param HOUR Hour Range
-  */
-@XmlRootElement
-case class Season(var DAY: Range, var HOUR: Range) {
-
-  def belongsTo(timestamp : Long) : Boolean = {
-    val c = Calendar.getInstance
-    c.setTimeInMillis(timestamp)
-    val dayOfWeek = c.get(Calendar.DAY_OF_WEEK)
-    val hourOfDay = c.get(Calendar.HOUR_OF_DAY)
-
-    if (DAY.lower != -1 && !DAY.withinRange(dayOfWeek))
-      return false
-    if (HOUR.lower != -1 && !HOUR.withinRange(hourOfDay))
-      return false
-    true
-  }
-
-  @Override
-  override def equals(obj: scala.Any): Boolean = {
-
-    if (obj == null) {
-      return false
-    }
-
-    val that : Season = obj.asInstanceOf[Season]
-    DAY.equals(that.DAY) && HOUR.equals(that.HOUR)
-  }
-
-  @Override
-  override def toString: String = {
-
-    var prettyPrintString = ""
-
-    if (DAY != null && DAY.lower != -1) {
-      // Convert Calendar day-of-week (Sunday = 1) to java.time.DayOfWeek (Monday = 1).
-      var dLower: Int = DAY.lower - 1
-      if (dLower == 0) {
-        dLower = 7
-      }
-
-      var dHigher: Int = DAY.higher - 1
-      if (dHigher == 0) {
-        dHigher = 7
-      }
-
-      prettyPrintString = prettyPrintString.concat("DAY : [" + DayOfWeek.of(dLower) + "," + DayOfWeek.of(dHigher) + "]")
-    }
-
-    if (HOUR != null) {
-      prettyPrintString = prettyPrintString.concat(" HOUR : [" + HOUR.lower + "," + HOUR.higher + "]")
-    }
-    prettyPrintString
-  }
-}
-
-object Season {
-
-  def apply(DAY: Range, HOUR: Range): Season = new Season(DAY, HOUR)
-
-  def apply(range: Range, seasonType: SeasonType): Season = {
-    if (seasonType.equals(SeasonType.DAY)) {
-      new Season(range, Range(-1,-1))
-    } else {
-      new Season(Range(-1,-1), range)
-    }
-  }
-
-  val mapper = new ObjectMapper() with ScalaObjectMapper
-  mapper.registerModule(DefaultScalaModule)
-
-  def getSeasons(timestamp: Long, seasons : List[Season]) : List[Season] = {
-    val validSeasons : scala.collection.mutable.MutableList[Season] = scala.collection.mutable.MutableList.empty[Season]
-    for ( season <- seasons ) {
-      if (season.belongsTo(timestamp)) {
-        validSeasons += season
-      }
-    }
-    validSeasons.toList
-  }
-
-  def toJson(season: Season) : String = {
-    mapper.writeValueAsString(season)
-  }
-
-  def fromJson(seasonString: String) : Season = {
-    mapper.readValue[Season](seasonString)
-  }
-}
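-
-// Illustrative sketch, not part of the original file: constructing and querying a Season.
-// A "weekday working hours" season, using Calendar day numbering (Sunday = 1) and 24h hours:
-//   val businessHours = Season(Range(2, 6), Range(9, 17))   // Monday-Friday, 09:00-17:00
-//   businessHours.belongsTo(System.currentTimeMillis())     // true only during those hours
-//   Season.fromJson(Season.toJson(businessHours)) == businessHours   // round-trips via Jackson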
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/SeasonType.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/SeasonType.scala
deleted file mode 100644
index b510531..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/SeasonType.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.model
-
-object SeasonType extends Enumeration{
-
-  type SeasonType = Value
-  val DAY,HOUR = Value
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/TimeRange.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/TimeRange.scala
deleted file mode 100644
index 0be2564..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/TimeRange.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.model
-
-import java.util.Date
-
-/**
-  * A special form of the 'Range' class to denote a time range.
-  */
-case class TimeRange (startTime: Long, endTime: Long) {
-  @Override
-  override def toString: String = {
-    "StartTime=" + new Date(startTime) + ", EndTime=" + new Date(endTime)
-  }
-
-  @Override
-  override def equals(obj: scala.Any): Boolean = {
-    if (obj == null) {
-      return false
-    }
-    val that : TimeRange = obj.asInstanceOf[TimeRange]
-    (startTime == that.startTime) && (endTime == that.endTime)
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/TrendAnomalyInstance.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/TrendAnomalyInstance.scala
deleted file mode 100644
index d67747c..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/model/TrendAnomalyInstance.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.model
-
-import org.apache.ambari.metrics.adservice.metadata.MetricKey
-import org.apache.ambari.metrics.adservice.model.AnomalyDetectionMethod.AnomalyDetectionMethod
-import org.apache.ambari.metrics.adservice.model.AnomalyType.AnomalyType
-
-case class TrendAnomalyInstance (metricKey: MetricKey,
-                                 anomalousPeriod: TimeRange,
-                                 referencePeriod: TimeRange,
-                                 methodType: AnomalyDetectionMethod,
-                                 anomalyScore: Double,
-                                 seasonInfo: Season,
-                                 modelParameters: String) extends MetricAnomalyInstance {
-
-  override val anomalyType: AnomalyType = AnomalyType.TREND
-
-  private def anomalyToString : String = {
-    "Method=" + methodType + ", AnomalyScore=" + anomalyScore + ", Season=" + anomalousPeriod.toString +
-      ", Model Parameters=" + modelParameters
-  }
-
-  @Override
-  override def toString: String = {
-    "Metric : [" + metricKey.toString + ", AnomalousPeriod=" + anomalousPeriod + ", ReferencePeriod=" + referencePeriod +
-      "], Anomaly : [" + anomalyToString + "]"
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/AnomalyResource.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/AnomalyResource.scala
deleted file mode 100644
index db12307..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/AnomalyResource.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.resource
-
-import javax.ws.rs.core.MediaType.APPLICATION_JSON
-import javax.ws.rs.core.Response
-import javax.ws.rs.{GET, Path, Produces, QueryParam}
-
-import org.apache.ambari.metrics.adservice.model.{AnomalyType, MetricAnomalyInstance}
-import org.apache.ambari.metrics.adservice.model.AnomalyType.AnomalyType
-import org.apache.ambari.metrics.adservice.service.ADQueryService
-import org.apache.commons.lang.StringUtils
-
-import com.google.inject.Inject
-
-@Path("/anomaly")
-class AnomalyResource {
-
-  @Inject
-  var aDQueryService: ADQueryService = _
-
-  @GET
-  @Produces(Array(APPLICATION_JSON))
-  def getTopNAnomalies(@QueryParam("type") anType: String,
-                       @QueryParam("startTime") startTime: Long,
-                       @QueryParam("endTime") endTime: Long,
-                       @QueryParam("top") limit: Int): Response = {
-
-    val anomalies: List[MetricAnomalyInstance] = aDQueryService.getTopNAnomaliesByType(
-      parseAnomalyType(anType),
-      parseStartTime(startTime),
-      parseEndTime(endTime),
-      parseTop(limit))
-
-    Response.ok.entity(anomalies).build()
-  }
-
-  private def parseAnomalyType(anomalyType: String) : AnomalyType = {
-    if (StringUtils.isEmpty(anomalyType)) {
-      return AnomalyType.POINT_IN_TIME
-    }
-    AnomalyType.withName(anomalyType.toUpperCase)
-  }
-
-  private def parseStartTime(startTime: Long) : Long = {
-    if (startTime > 0l) {
-      return startTime
-    }
-    System.currentTimeMillis() - 60*60*1000
-  }
-
-  private def parseEndTime(endTime: Long) : Long = {
-    if (endTime > 0l) {
-      return endTime
-    }
-    System.currentTimeMillis()
-  }
-
-  private def parseTop(limit: Int) : Int = {
-    if (limit > 0) {
-      return limit
-    }
-    5
-  }
-}
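-
-// Illustrative sketch, not part of the original file: how this endpoint is called.
-// All query parameters are optional; the defaults are POINT_IN_TIME, the last hour and top 5.
-//   GET /anomaly?type=trend&startTime=1514764800000&endTime=1514768400000&top=10
-// returns a JSON list of MetricAnomalyInstance objects for the window, ranked by anomaly score.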
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/MetricDefinitionResource.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/MetricDefinitionResource.scala
deleted file mode 100644
index 442bf46..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/MetricDefinitionResource.scala
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.resource
-
-import javax.ws.rs._
-import javax.ws.rs.core.MediaType.APPLICATION_JSON
-import javax.ws.rs.core.Response
-
-import org.apache.ambari.metrics.adservice.metadata.{MetricDefinitionService, MetricKey, MetricSourceDefinition}
-import org.apache.commons.lang.StringUtils
-
-import com.google.inject.Inject
-
-@Path("/metric-definition")
-class MetricDefinitionResource {
-
-  @Inject
-  var metricDefinitionService: MetricDefinitionService = _
-
-  @GET
-  @Produces(Array(APPLICATION_JSON))
-  @Path("/{name}")
-  def defaultGet(@PathParam("name") definitionName: String): Response  = {
-
-    if (StringUtils.isEmpty(definitionName)) {
-      Response.ok.entity(Map("message" -> "Definition name cannot be empty. Use query parameter 'name'")).build()
-    }
-    val metricSourceDefinition = metricDefinitionService.getDefinitionByName(definitionName)
-    if (metricSourceDefinition != null) {
-      Response.ok.entity(metricSourceDefinition).build()
-    } else {
-      Response.ok.entity(Map("message" -> "Definition not found")).build()
-    }
-  }
-
-  @GET
-  @Produces(Array(APPLICATION_JSON))
-  def getAllMetricDefinitions: Response  = {
-    val metricSourceDefinitionMap: List[MetricSourceDefinition] = metricDefinitionService.getDefinitions
-    Response.ok.entity(metricSourceDefinitionMap).build()
-  }
-
-  @GET
-  @Path("/keys")
-  @Produces(Array(APPLICATION_JSON))
-  def getMetricKeys: Response  = {
-    val metricKeyMap:  Map[String, Set[MetricKey]] = metricDefinitionService.getMetricKeys
-    Response.ok.entity(metricKeyMap).build()
-  }
-
-  @POST
-  @Produces(Array(APPLICATION_JSON))
-  def defaultPost(definition: MetricSourceDefinition) : Response = {
-    if (definition == null) {
-      Response.ok.entity(Map("message" -> "Definition content cannot be empty.")).build()
-    }
-    val success : Boolean = metricDefinitionService.addDefinition(definition)
-    if (success) {
-      Response.ok.entity(Map("message" -> "Definition saved")).build()
-    } else {
-      Response.ok.entity(Map("message" -> "Definition could not be saved")).build()
-    }
-  }
-
-  @PUT
-  @Produces(Array(APPLICATION_JSON))
-  def defaultPut(definition: MetricSourceDefinition) : Response = {
-    if (definition == null) {
-      Response.ok.entity(Map("message" -> "Definition content cannot be empty.")).build()
-    }
-    val success : Boolean = metricDefinitionService.updateDefinition(definition)
-    if (success) {
-      Response.ok.entity(Map("message" -> "Definition updated")).build()
-    } else {
-      Response.ok.entity(Map("message" -> "Definition could not be updated")).build()
-    }
-  }
-
-  @DELETE
-  @Produces(Array(APPLICATION_JSON))
-  @Path("/{name}")
-  def defaultDelete(@PathParam("name") definitionName: String): Response  = {
-
-    if (StringUtils.isEmpty(definitionName)) {
-      Response.ok.entity(Map("message" -> "Definition name cannot be empty. Use query parameter 'name'")).build()
-    }
-    val success: Boolean = metricDefinitionService.deleteDefinitionByName(definitionName)
-    if (success) {
-      Response.ok.entity(Map("message" -> "Definition deleted")).build()
-    } else {
-      Response.ok.entity(Map("message" -> "Definition could not be deleted")).build()
-    }
-  }
-}
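-
-// Illustrative sketch, not part of the original file: the CRUD surface exposed by this resource.
-//   GET    /metric-definition          -> all metric source definitions
-//   GET    /metric-definition/{name}   -> a single definition by name
-//   GET    /metric-definition/keys     -> metric keys grouped by definition
-//   POST   /metric-definition          -> add a definition (JSON MetricSourceDefinition body)
-//   PUT    /metric-definition          -> update an existing definition
-//   DELETE /metric-definition/{name}   -> delete a definition by name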
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/RootResource.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/RootResource.scala
deleted file mode 100644
index fd55b64..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/RootResource.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.resource
-
-import java.time.LocalDateTime
-
-import javax.ws.rs.core.MediaType.APPLICATION_JSON
-import javax.ws.rs.core.Response
-import javax.ws.rs.{GET, Path, Produces}
-
-import org.joda.time.DateTime
-
-@Path("/")
-class RootResource {
-
-  @Produces(Array(APPLICATION_JSON))
-  @GET
-  def default: Response = {
-    val dtf = java.time.format.DateTimeFormatter.ofPattern("yyyy/MM/dd HH:mm")
-    Response.ok.entity(Map("name" -> "anomaly-detection-service", "today" -> LocalDateTime.now)).build()
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/SubsystemResource.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/SubsystemResource.scala
deleted file mode 100644
index e7d7c9a..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/resource/SubsystemResource.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.resource
-
-class SubsystemResource {
-
-  /*
-    GET / UPDATE - parameters (which subsystem, parameters)
-    POST - Update sensitivity of a subsystem (which subsystem, increase or decrease, factor)
-   */
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/service/ADQueryService.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/service/ADQueryService.scala
deleted file mode 100644
index 2cfa30f..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/service/ADQueryService.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.service
-
-import org.apache.ambari.metrics.adservice.model.AnomalyType.AnomalyType
-import org.apache.ambari.metrics.adservice.model.MetricAnomalyInstance
-
-trait ADQueryService extends AbstractADService{
-
-  /**
-    * API to return list of single metric anomalies satisfying a set of conditions from the anomaly store.
-    * @param anomalyType Type of the anomaly (Point In Time / Trend)
-    * @param startTime Start of time range
-    * @param endTime End of time range
-    * @param limit Maximum number of anomalies to return, ranked by anomaly score.
-    * @return
-    */
-  def getTopNAnomaliesByType(anomalyType: AnomalyType, startTime: Long, endTime: Long, limit: Int): List[MetricAnomalyInstance]
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/service/ADQueryServiceImpl.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/service/ADQueryServiceImpl.scala
deleted file mode 100644
index 3b49208..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/service/ADQueryServiceImpl.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.service
-import org.apache.ambari.metrics.adservice.db.AdAnomalyStoreAccessor
-import org.apache.ambari.metrics.adservice.model.AnomalyType.AnomalyType
-import org.apache.ambari.metrics.adservice.model.MetricAnomalyInstance
-import org.slf4j.{Logger, LoggerFactory}
-
-import com.google.inject.{Inject, Singleton}
-
-@Singleton
-class ADQueryServiceImpl extends ADQueryService {
-
-  val LOG : Logger = LoggerFactory.getLogger(classOf[ADQueryServiceImpl])
-
-  @Inject
-  var adAnomalyStoreAccessor: AdAnomalyStoreAccessor = _
-
-  /**
-    * Initialize Service
-    */
-  override def initialize(): Unit = {
-    LOG.info("Initializing AD Query Service...")
-    adAnomalyStoreAccessor.initialize()
-    LOG.info("Successfully initialized AD Query Service.")
-  }
-
-  /**
-    * Implementation to return list of anomalies satisfying a set of conditions from the anomaly store.
-    *
-    * @param anomalyType Type of the anomaly (Point In Time / Trend)
-    * @param startTime   Start of time range
-    * @param endTime     End of time range
-    * @param limit       Maximum number of anomalies to return, ranked by anomaly score.
-    * @return
-    */
-  override def getTopNAnomaliesByType(anomalyType: AnomalyType, startTime: Long, endTime: Long, limit: Int): List[MetricAnomalyInstance] = {
-    val anomalies = adAnomalyStoreAccessor.getMetricAnomalies(anomalyType, startTime, endTime, limit)
-    anomalies
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/service/AbstractADService.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/service/AbstractADService.scala
deleted file mode 100644
index 56bb999..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/service/AbstractADService.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.service
-
-trait AbstractADService {
-
-  /**
-    * Initialize Service
-    */
-  def initialize(): Unit
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/spark/prototype/MetricAnomalyDetector.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/spark/prototype/MetricAnomalyDetector.scala
deleted file mode 100644
index 90c564e..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/spark/prototype/MetricAnomalyDetector.scala
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.spark.prototype
-
-object MetricAnomalyDetector {
-
-  /*
-    Load current EMA model
-    Filter step - Check if anomaly
-    Collect / Write to AMS / Print.
-   */
-
-//  var brokers = "avijayan-ams-1.openstacklocal:2181,avijayan-ams-2.openstacklocal:2181,avijayan-ams-3.openstacklocal:2181"
-//  var groupId = "ambari-metrics-group"
-//  var topicName = "ambari-metrics-topic"
-//  var numThreads = 1
-//  val anomalyDetectionModels: Array[AnomalyDetectionTechnique] = Array[AnomalyDetectionTechnique]()
-//
-//  def readProperties(propertiesFile: String): Properties = try {
-//    val properties = new Properties
-//    var inputStream = ClassLoader.getSystemResourceAsStream(propertiesFile)
-//    if (inputStream == null) inputStream = new FileInputStream(propertiesFile)
-//    properties.load(inputStream)
-//    properties
-//  } catch {
-//    case ioEx: IOException =>
-//      null
-//  }
-//
-//  def main(args: Array[String]): Unit = {
-//
-//    @transient
-//    lazy val log = org.apache.log4j.LogManager.getLogger("MetricAnomalyDetectorLogger")
-//
-//    if (args.length < 1) {
-//      System.err.println("Usage: MetricSparkConsumer <input-config-file>")
-//      System.exit(1)
-//    }
-//
-//    //Read properties
-//    val properties = readProperties(propertiesFile = args(0))
-//
-//    //Load EMA parameters - w, n
-//    val emaW = properties.getProperty("emaW").toDouble
-//    val emaN = properties.getProperty("emaN").toDouble
-//
-//    //collector info
-//    val collectorHost: String = properties.getProperty("collectorHost")
-//    val collectorPort: String = properties.getProperty("collectorPort")
-//    val collectorProtocol: String = properties.getProperty("collectorProtocol")
-//    val anomalyMetricPublisher = new MetricsCollectorInterface(collectorHost, collectorProtocol, collectorPort)
-//
-//    //Instantiate Kafka stream reader
-//    val sparkConf = new SparkConf().setAppName("AmbariMetricsAnomalyDetector")
-//    val streamingContext = new StreamingContext(sparkConf, Duration(10000))
-//
-//    val topicsSet = topicName.toSet
-//    val kafkaParams = Map[String, String]("metadata.broker.list" -> brokers)
-////    val stream = KafkaUtils.createDirectStream()
-//
-//    val kafkaStream = KafkaUtils.createStream(streamingContext, zkQuorum, groupId, Map(topicName -> numThreads), StorageLevel.MEMORY_AND_DISK_SER_2)
-//    kafkaStream.print()
-//
-//    var timelineMetricsStream = kafkaStream.map( message => {
-//      val mapper = new ObjectMapper
-//      val metrics = mapper.readValue(message._2, classOf[TimelineMetrics])
-//      metrics
-//    })
-//    timelineMetricsStream.print()
-//
-//    var appMetricStream = timelineMetricsStream.map( timelineMetrics => {
-//      (timelineMetrics.getMetrics.get(0).getAppId, timelineMetrics)
-//    })
-//    appMetricStream.print()
-//
-//    var filteredAppMetricStream = appMetricStream.filter( appMetricTuple => {
-//      appIds.contains(appMetricTuple._1)
-//    } )
-//    filteredAppMetricStream.print()
-//
-//    filteredAppMetricStream.foreachRDD( rdd => {
-//      rdd.foreach( appMetricTuple => {
-//        val timelineMetrics = appMetricTuple._2
-//        logger.info("Received Metric (1): " + timelineMetrics.getMetrics.get(0).getMetricName)
-//        log.info("Received Metric (2): " + timelineMetrics.getMetrics.get(0).getMetricName)
-//        for (timelineMetric <- timelineMetrics.getMetrics) {
-//          var anomalies = emaModel.test(timelineMetric)
-//          anomalyMetricPublisher.publish(anomalies)
-//        }
-//      })
-//    })
-//
-//    streamingContext.start()
-//    streamingContext.awaitTermination()
-//  }
-  }
diff --git a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/spark/prototype/SparkPhoenixReader.scala b/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/spark/prototype/SparkPhoenixReader.scala
deleted file mode 100644
index 466225f..0000000
--- a/ambari-metrics-anomaly-detection-service/src/main/scala/org/apache/ambari/metrics/adservice/spark/prototype/SparkPhoenixReader.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.spark.prototype
-
-object SparkPhoenixReader {
-
-  def main(args: Array[String]) {
-
-//    if (args.length < 6) {
-//      System.err.println("Usage: SparkPhoenixReader <metric_name> <appId> <hostname> <weight> <timessdev> <phoenixConnectionString> <model_dir>")
-//      System.exit(1)
-//    }
-//
-//    var metricName = args(0)
-//    var appId = args(1)
-//    var hostname = args(2)
-//    var weight = args(3).toDouble
-//    var timessdev = args(4).toInt
-//    var phoenixConnectionString = args(5) //avijayan-ams-3.openstacklocal:61181:/ams-hbase-unsecure
-//    var modelDir = args(6)
-//
-//    val conf = new SparkConf()
-//    conf.set("spark.app.name", "AMSAnomalyModelBuilder")
-//    //conf.set("spark.master", "spark://avijayan-ams-2.openstacklocal:7077")
-//
-//    var sc = new SparkContext(conf)
-//    val sqlContext = new SQLContext(sc)
-//
-//    val currentTime = System.currentTimeMillis()
-//    val oneDayBack = currentTime - 24*60*60*1000
-//
-//    val df = sqlContext.load("org.apache.phoenix.spark", Map("table" -> "METRIC_RECORD", "zkUrl" -> phoenixConnectionString))
-//    df.registerTempTable("METRIC_RECORD")
-//    val result = sqlContext.sql("SELECT METRIC_NAME, HOSTNAME, APP_ID, SERVER_TIME, METRIC_SUM, METRIC_COUNT FROM METRIC_RECORD " +
-//      "WHERE METRIC_NAME = '" + metricName + "' AND HOSTNAME = '" + hostname + "' AND APP_ID = '" + appId + "' AND SERVER_TIME < " + currentTime + " AND SERVER_TIME > " + oneDayBack)
-//
-//    var metricValues = new java.util.TreeMap[java.lang.Long, java.lang.Double]
-//    result.collect().foreach(
-//      t => metricValues.put(t.getLong(3), t.getDouble(4) / t.getInt(5))
-//    )
-//
-//    //val seriesName = result.head().getString(0)
-//    //val hostname = result.head().getString(1)
-//    //val appId = result.head().getString(2)
-//
-//    val timelineMetric = new TimelineMetric()
-//    timelineMetric.setMetricName(metricName)
-//    timelineMetric.setAppId(appId)
-//    timelineMetric.setHostName(hostname)
-//    timelineMetric.setMetricValues(metricValues)
-//
-//    var emaModel = new EmaTechnique(weight, timessdev)
-//    emaModel.test(timelineMetric)
-//    emaModel.save(sc, modelDir)
-
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/prototype/TestEmaTechnique.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/prototype/TestEmaTechnique.java
deleted file mode 100644
index 76a00a6..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/prototype/TestEmaTechnique.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype;
-
-import org.apache.ambari.metrics.adservice.prototype.core.RFunctionInvoker;
-import org.apache.ambari.metrics.adservice.prototype.methods.MetricAnomaly;
-import org.apache.ambari.metrics.adservice.prototype.methods.ema.EmaTechnique;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.junit.Assert;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.io.File;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.util.List;
-import java.util.TreeMap;
-
-import static org.apache.ambari.metrics.adservice.prototype.TestRFunctionInvoker.getTS;
-
-public class TestEmaTechnique {
-
-  private static double[] ts;
-  private static String fullFilePath;
-
-  @BeforeClass
-  public static void init() throws URISyntaxException {
-
-    Assume.assumeTrue(System.getenv("R_HOME") != null);
-    ts = getTS(1000);
-    URL url = ClassLoader.getSystemResource("R-scripts");
-    fullFilePath = new File(url.toURI()).getAbsolutePath();
-    RFunctionInvoker.setScriptsDir(fullFilePath);
-  }
-
-  @Test
-  public void testEmaInitialization() {
-
-    EmaTechnique ema = new EmaTechnique(0.5, 3);
-    Assert.assertTrue(ema.getTrackedEmas().isEmpty());
-    Assert.assertTrue(ema.getStartingWeight() == 0.5);
-    Assert.assertTrue(ema.getStartTimesSdev() == 2);
-  }
-
-  @Test
-  public void testEma() {
-    EmaTechnique ema = new EmaTechnique(0.5, 3);
-
-    long now = System.currentTimeMillis();
-
-    TimelineMetric metric1 = new TimelineMetric();
-    metric1.setMetricName("M1");
-    metric1.setHostName("H1");
-    metric1.setStartTime(now - 1000);
-    metric1.setAppId("A1");
-    metric1.setInstanceId(null);
-    metric1.setType("Integer");
-
-    //Train
-    TreeMap<Long, Double> metricValues = new TreeMap<Long, Double>();
-    for (int i = 0; i < 50; i++) {
-      double metric = 20000 + Math.random();
-      metricValues.put(now - i * 100, metric);
-    }
-    metric1.setMetricValues(metricValues);
-    List<MetricAnomaly> anomalyList = ema.test(metric1);
-//    Assert.assertTrue(anomalyList.isEmpty());
-
-    metricValues = new TreeMap<Long, Double>();
-    for (int i = 0; i < 50; i++) {
-      double metric = 20000 + Math.random();
-      metricValues.put(now - i * 100, metric);
-    }
-    metric1.setMetricValues(metricValues);
-    anomalyList = ema.test(metric1);
-    Assert.assertTrue(!anomalyList.isEmpty());
-    int l1 = anomalyList.size();
-
-    Assert.assertTrue(ema.updateModel(metric1, false, 20));
-    anomalyList = ema.test(metric1);
-    int l2 = anomalyList.size();
-    Assert.assertTrue(l2 < l1);
-
-    Assert.assertTrue(ema.updateModel(metric1, true, 50));
-    anomalyList = ema.test(metric1);
-    int l3 = anomalyList.size();
-    Assert.assertTrue(l3 > l2 && l3 > l1);
-
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/prototype/TestRFunctionInvoker.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/prototype/TestRFunctionInvoker.java
deleted file mode 100644
index 98fa050..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/prototype/TestRFunctionInvoker.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype;
-
-import org.apache.ambari.metrics.adservice.prototype.common.DataSeries;
-import org.apache.ambari.metrics.adservice.prototype.common.ResultSet;
-import org.apache.ambari.metrics.adservice.prototype.core.RFunctionInvoker;
-import org.apache.ambari.metrics.adservice.seriesgenerator.UniformMetricSeries;
-import org.apache.commons.lang.ArrayUtils;
-import org.junit.Assert;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.io.File;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.util.HashMap;
-import java.util.Map;
-
-public class TestRFunctionInvoker {
-
-  private static String metricName = "TestMetric";
-  private static double[] ts;
-  private static String fullFilePath;
-
-  @BeforeClass
-  public static void init() throws URISyntaxException {
-
-    Assume.assumeTrue(System.getenv("R_HOME") != null);
-    ts = getTS(1000);
-    URL url = ClassLoader.getSystemResource("R-scripts");
-    fullFilePath = new File(url.toURI()).getAbsolutePath();
-    RFunctionInvoker.setScriptsDir(fullFilePath);
-  }
-
-  @Test
-  public void testTukeys() throws URISyntaxException {
-
-    double[] train_ts = ArrayUtils.subarray(ts, 0, 750);
-    double[] train_x = getRandomData(750);
-    DataSeries trainData = new DataSeries(metricName, train_ts, train_x);
-
-    double[] test_ts = ArrayUtils.subarray(ts, 750, 1000);
-    double[] test_x = getRandomData(250);
-    test_x[50] = 5.5; //Anomaly
-    DataSeries testData = new DataSeries(metricName, test_ts, test_x);
-    Map<String, String> configs = new HashMap();
-    configs.put("tukeys.n", "3");
-
-    ResultSet rs = RFunctionInvoker.tukeys(trainData, testData, configs);
-    Assert.assertEquals(rs.resultset.size(), 2);
-    Assert.assertEquals(rs.resultset.get(1)[0], 5.5, 0.1);
-
-  }
-
-  public static void main(String[] args) throws URISyntaxException {
-
-    String metricName = "TestMetric";
-    double[] ts = getTS(1000);
-    URL url = ClassLoader.getSystemResource("R-scripts");
-    String fullFilePath = new File(url.toURI()).getAbsolutePath();
-    RFunctionInvoker.setScriptsDir(fullFilePath);
-
-    double[] train_ts = ArrayUtils.subarray(ts, 0, 750);
-    double[] train_x = getRandomData(750);
-    DataSeries trainData = new DataSeries(metricName, train_ts, train_x);
-
-    double[] test_ts = ArrayUtils.subarray(ts, 750, 1000);
-    double[] test_x = getRandomData(250);
-    test_x[50] = 5.5; //Anomaly
-    DataSeries testData = new DataSeries(metricName, test_ts, test_x);
-    ResultSet rs;
-
-    Map<String, String> configs = new HashMap();
-
-    System.out.println("TUKEYS");
-    configs.put("tukeys.n", "3");
-    rs = RFunctionInvoker.tukeys(trainData, testData, configs);
-    rs.print();
-    System.out.println("--------------");
-
-//    System.out.println("EMA Global");
-//    configs.put("ema.n", "3");
-//    configs.put("ema.w", "0.8");
-//    rs = RFunctionInvoker.ema_global(trainData, testData, configs);
-//    rs.print();
-//    System.out.println("--------------");
-//
-//    System.out.println("EMA Daily");
-//    rs = RFunctionInvoker.ema_daily(trainData, testData, configs);
-//    rs.print();
-//    System.out.println("--------------");
-//
-//    configs.put("ks.p_value", "0.00005");
-//    System.out.println("KS Test");
-//    rs = RFunctionInvoker.ksTest(trainData, testData, configs);
-//    rs.print();
-//    System.out.println("--------------");
-//
-    ts = getTS(5000);
-    train_ts = ArrayUtils.subarray(ts, 0, 4800);
-    train_x = getRandomData(4800);
-    trainData = new DataSeries(metricName, train_ts, train_x);
-    test_ts = ArrayUtils.subarray(ts, 4800, 5000);
-    test_x = getRandomData(200);
-    for (int i = 0; i < 200; i++) {
-      test_x[i] = test_x[i] * 5;
-    }
-    testData = new DataSeries(metricName, test_ts, test_x);
-    configs.put("hsdev.n", "3");
-    configs.put("hsdev.nhp", "3");
-    configs.put("hsdev.interval", "86400000");
-    configs.put("hsdev.period", "604800000");
-    System.out.println("HSdev");
-    rs = RFunctionInvoker.hsdev(trainData, testData, configs);
-    rs.print();
-    System.out.println("--------------");
-
-  }
-
-  static double[] getTS(int n) {
-    long currentTime = System.currentTimeMillis();
-    double[] ts = new double[n];
-    currentTime = currentTime - (currentTime % (5 * 60 * 1000));
-
-    for (int i = 0, j = n - 1; i < n; i++, j--) {
-      ts[j] = currentTime;
-      currentTime = currentTime - (5 * 60 * 1000);
-    }
-    return ts;
-  }
-
-  static double[] getRandomData(int n) {
-
-    UniformMetricSeries metricSeries =  new UniformMetricSeries(10, 0.1,0.05, 0.6, 0.8, true);
-    return metricSeries.getSeries(n);
-
-//    double[] metrics = new double[n];
-//    Random random = new Random();
-//    for (int i = 0; i < n; i++) {
-//      metrics[i] = random.nextDouble();
-//    }
-//    return metrics;
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/prototype/TestTukeys.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/prototype/TestTukeys.java
deleted file mode 100644
index 1077a9c..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/prototype/TestTukeys.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.prototype;
-
-import org.apache.ambari.metrics.adservice.prototype.core.MetricsCollectorInterface;
-import org.apache.ambari.metrics.adservice.prototype.core.RFunctionInvoker;
-import org.apache.ambari.metrics.adservice.prototype.methods.MetricAnomaly;
-import org.apache.ambari.metrics.adservice.prototype.methods.ema.EmaTechnique;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.io.File;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.net.UnknownHostException;
-import java.util.List;
-import java.util.TreeMap;
-
-public class TestTukeys {
-
-  @BeforeClass
-  public static void init() throws URISyntaxException {
-    Assume.assumeTrue(System.getenv("R_HOME") != null);
-  }
-
-  @Test
-  public void testPointInTimeDetectionSystem() throws UnknownHostException, URISyntaxException {
-
-    URL url = ClassLoader.getSystemResource("R-scripts");
-    String fullFilePath = new File(url.toURI()).getAbsolutePath();
-    RFunctionInvoker.setScriptsDir(fullFilePath);
-
-    MetricsCollectorInterface metricsCollectorInterface = new MetricsCollectorInterface("avijayan-ams-1.openstacklocal","http", "6188");
-
-    EmaTechnique ema = new EmaTechnique(0.5, 3);
-    long now = System.currentTimeMillis();
-
-    TimelineMetric metric1 = new TimelineMetric();
-    metric1.setMetricName("mm9");
-    metric1.setHostName(MetricsCollectorInterface.getDefaultLocalHostName());
-    metric1.setStartTime(now);
-    metric1.setAppId("aa9");
-    metric1.setInstanceId(null);
-    metric1.setType("Integer");
-
-    //Train
-    TreeMap<Long, Double> metricValues = new TreeMap<Long, Double>();
-
-    //2hr data.
-    for (int i = 0; i < 120; i++) {
-      double metric = 20000 + Math.random();
-      metricValues.put(now - i * 60 * 1000, metric);
-    }
-    metric1.setMetricValues(metricValues);
-    TimelineMetrics timelineMetrics = new TimelineMetrics();
-    timelineMetrics.addOrMergeTimelineMetric(metric1);
-
-    metricsCollectorInterface.emitMetrics(timelineMetrics);
-
-    List<MetricAnomaly> anomalyList = ema.test(metric1);
-    metricsCollectorInterface.publish(anomalyList);
-//
-//    PointInTimeADSystem pointInTimeADSystem = new PointInTimeADSystem(ema, metricsCollectorInterface, 3, 5*60*1000, 15*60*1000);
-//    pointInTimeADSystem.runOnce();
-//
-//    List<MetricAnomaly> anomalyList2 = ema.test(metric1);
-//
-//    pointInTimeADSystem.runOnce();
-//    List<MetricAnomaly> anomalyList3 = ema.test(metric1);
-//
-//    pointInTimeADSystem.runOnce();
-//    List<MetricAnomaly> anomalyList4 = ema.test(metric1);
-//
-//    pointInTimeADSystem.runOnce();
-//    List<MetricAnomaly> anomalyList5 = ema.test(metric1);
-//
-//    pointInTimeADSystem.runOnce();
-//    List<MetricAnomaly> anomalyList6 = ema.test(metric1);
-//
-//    Assert.assertTrue(anomalyList6.size() < anomalyList.size());
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/AbstractMetricSeries.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/AbstractMetricSeries.java
deleted file mode 100644
index 635a929..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/AbstractMetricSeries.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.seriesgenerator;
-
-public interface AbstractMetricSeries {
-
-  public double nextValue();
-  public double[] getSeries(int n);
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/DualBandMetricSeries.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/DualBandMetricSeries.java
deleted file mode 100644
index a9e3f30..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/DualBandMetricSeries.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.seriesgenerator;
-
-import java.util.Random;
-
-public class DualBandMetricSeries implements AbstractMetricSeries {
-
-  double lowBandValue = 0.0;
-  double lowBandDeviationPercentage = 0.0;
-  int lowBandPeriodSize = 10;
-  double highBandValue = 1.0;
-  double highBandDeviationPercentage = 0.0;
-  int highBandPeriodSize = 10;
-
-  Random random = new Random();
-  double lowBandValueLowerLimit, lowBandValueHigherLimit;
-  double highBandLowerLimit, highBandUpperLimit;
-  int l = 0, h = 0;
-
-  public DualBandMetricSeries(double lowBandValue,
-                              double lowBandDeviationPercentage,
-                              int lowBandPeriodSize,
-                              double highBandValue,
-                              double highBandDeviationPercentage,
-                              int highBandPeriodSize) {
-    this.lowBandValue = lowBandValue;
-    this.lowBandDeviationPercentage = lowBandDeviationPercentage;
-    this.lowBandPeriodSize = lowBandPeriodSize;
-    this.highBandValue = highBandValue;
-    this.highBandDeviationPercentage = highBandDeviationPercentage;
-    this.highBandPeriodSize = highBandPeriodSize;
-    init();
-  }
-
-  private void init() {
-    lowBandValueLowerLimit = lowBandValue - lowBandDeviationPercentage * lowBandValue;
-    lowBandValueHigherLimit = lowBandValue + lowBandDeviationPercentage * lowBandValue;
-    highBandLowerLimit = highBandValue - highBandDeviationPercentage * highBandValue;
-    highBandUpperLimit = highBandValue + highBandDeviationPercentage * highBandValue;
-  }
-
-  @Override
-  public double nextValue() {
-
-    double value = 0.0;
-
-    if (l < lowBandPeriodSize) {
-      value = lowBandValueLowerLimit + (lowBandValueHigherLimit - lowBandValueLowerLimit) * random.nextDouble();
-      l++;
-    } else if (h < highBandPeriodSize) {
-      value = highBandLowerLimit + (highBandUpperLimit - highBandLowerLimit) * random.nextDouble();
-      h++;
-    }
-
-    if (l == lowBandPeriodSize && h == highBandPeriodSize) {
-      l = 0;
-      h = 0;
-    }
-
-    return value;
-  }
-
-  @Override
-  public double[] getSeries(int n) {
-    double[] series = new double[n];
-    for (int i = 0; i < n; i++) {
-      series[i] = nextValue();
-    }
-    return series;
-  }
-
-}
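-
-// Illustrative sketch, not part of the original file: generating a square-wave-like test series.
-//   AbstractMetricSeries series = new DualBandMetricSeries(5.0, 0.1, 10, 50.0, 0.1, 10);
-//   double[] values = series.getSeries(100);  // alternates between ~5 and ~50 every 10 points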
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/MetricSeriesGeneratorFactory.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/MetricSeriesGeneratorFactory.java
deleted file mode 100644
index a50b433..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/MetricSeriesGeneratorFactory.java
+++ /dev/null
@@ -1,377 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.seriesgenerator;
-
-import java.util.Map;
-
-public class MetricSeriesGeneratorFactory {
-
-  /**
-   * Returns a uniform data series centered on a value, with some deviation and optional outliers.
-   *
-   * @param n                                size of the data series
-   * @param value                            The value around which the uniform data series is centered on.
-   * @param deviationPercentage              The allowed deviation on either side of the uniform value, as a fraction of it. For example, if value = 10 and deviationPercentage = 0.1, the series values lie between 9 and 11.
-   * @param outlierProbability               The probability of finding an outlier in the series.
-   * @param outlierDeviationLowerPercentage  Minimum fraction an outlier deviates from the uniform value. For example, if value = 10 and this is 0.3, outliers lie at least as far out as 7 (below) or 13 (above).
-   * @param outlierDeviationHigherPercentage Maximum fraction an outlier deviates from the uniform value. For example, if value = 10 and this is 0.6, outliers lie no farther out than 4 (below) or 16 (above).
-   * @param outliersAboveValue               Whether outliers lie above (true) or below (false) the uniform value.
-   * @return uniform series
-   */
-  public static double[] createUniformSeries(int n,
-                                             double value,
-                                             double deviationPercentage,
-                                             double outlierProbability,
-                                             double outlierDeviationLowerPercentage,
-                                             double outlierDeviationHigherPercentage,
-                                             boolean outliersAboveValue) {
-
-    UniformMetricSeries metricSeries = new UniformMetricSeries(value,
-      deviationPercentage,
-      outlierProbability,
-      outlierDeviationLowerPercentage,
-      outlierDeviationHigherPercentage,
-      outliersAboveValue);
-
-    return metricSeries.getSeries(n);
-  }
-
-
-  /**
-   * Returns a normally distributed series.
-   *
-   * @param n                             size of the data series
-   * @param mean                          mean of the distribution
-   * @param sd                            standard deviation of the distribution
-   * @param outlierProbability            The probability of finding an outlier in the series.
-   * @param outlierDeviationSDTimesLower  Lower limit of an outlier, expressed as a multiple of sd away from the mean.
-   * @param outlierDeviationSDTimesHigher Higher limit of an outlier, expressed as a multiple of sd away from the mean.
-   * @param outlierOnRightEnd             Whether outliers lie on the right (true) or left (false) end of the distribution.
-   * @return normal series
-   */
-  public static double[] createNormalSeries(int n,
-                                            double mean,
-                                            double sd,
-                                            double outlierProbability,
-                                            double outlierDeviationSDTimesLower,
-                                            double outlierDeviationSDTimesHigher,
-                                            boolean outlierOnRightEnd) {
-
-
-    NormalMetricSeries metricSeries = new NormalMetricSeries(mean,
-      sd,
-      outlierProbability,
-      outlierDeviationSDTimesLower,
-      outlierDeviationSDTimesHigher,
-      outlierOnRightEnd);
-
-    return metricSeries.getSeries(n);
-  }
-
-
-  /**
-   * Returns a monotonically increasing / decreasing series
-   *
-   * @param n                                size of the data series
-   * @param startValue                       Start value of the monotonic sequence
-   * @param slope                            slope of the series; m > 0 for increasing, m < 0 for decreasing.
-   * @param deviationPercentage              The allowed deviation on either side of the current 'y' value, as a fraction of it. For example, if the current value is 10 and deviationPercentage = 0.1, the series values lie between 9 and 11.
-   * @param outlierProbability               The probability of finding an outlier in the series.
-   * @param outlierDeviationLowerPercentage  Minimum fraction an outlier deviates from the current 'y' value. For example, if y = 10 and this is 0.3, outliers lie at least as far out as 7 (below) or 13 (above).
-   * @param outlierDeviationHigherPercentage Maximum fraction an outlier deviates from the current 'y' value. For example, if y = 10 and this is 0.6, outliers lie no farther out than 4 (below) or 16 (above).
-   * @param outliersAboveValue               Whether outliers lie above (true) or below (false) the current 'y' value.
-   * @return monotonic series
-   */
-  public static double[] createMonotonicSeries(int n,
-                                               double startValue,
-                                               double slope,
-                                               double deviationPercentage,
-                                               double outlierProbability,
-                                               double outlierDeviationLowerPercentage,
-                                               double outlierDeviationHigherPercentage,
-                                               boolean outliersAboveValue) {
-
-    MonotonicMetricSeries metricSeries = new MonotonicMetricSeries(startValue,
-      slope,
-      deviationPercentage,
-      outlierProbability,
-      outlierDeviationLowerPercentage,
-      outlierDeviationHigherPercentage,
-      outliersAboveValue);
-
-    return metricSeries.getSeries(n);
-  }
-
-
-  /**
-   * Returns a dual band series (lower and higher)
-   *
-   * @param n                           size of the data series
-   * @param lowBandValue                lower band centre value
-   * @param lowBandDeviationPercentage  lower band deviation fraction
-   * @param lowBandPeriodSize           number of points in each lower band period
-   * @param highBandValue               high band centre value
-   * @param highBandDeviationPercentage high band deviation fraction
-   * @param highBandPeriodSize          number of points in each higher band period
-   * @return dual band series
-   */
-  public static double[] getDualBandSeries(int n,
-                                           double lowBandValue,
-                                           double lowBandDeviationPercentage,
-                                           int lowBandPeriodSize,
-                                           double highBandValue,
-                                           double highBandDeviationPercentage,
-                                           int highBandPeriodSize) {
-
-    DualBandMetricSeries metricSeries  = new DualBandMetricSeries(lowBandValue,
-      lowBandDeviationPercentage,
-      lowBandPeriodSize,
-      highBandValue,
-      highBandDeviationPercentage,
-      highBandPeriodSize);
-
-    return metricSeries.getSeries(n);
-  }
-
-  /**
-   * Returns a step function series.
-   *
-   * @param n                              size of the data series
-   * @param startValue                     start steady value
-   * @param steadyValueDeviationPercentage allowed deviation fraction around the steady-state value
-   * @param steadyPeriodSlope              slope within a steady period; m > 0 increasing, m < 0 decreasing, m = 0 flat.
-   * @param steadyPeriodMinSize            min size (number of points) of a steady period
-   * @param steadyPeriodMaxSize            max size (number of points) of a steady period
-   * @param stepChangePercentage           Size of each step, expressed as a fraction of the last value of the previous steady period.
-   * @param upwardStep                     Whether steps go upward (true) or downward (false).
-   * @return step function series
-   */
-  public static double[] getStepFunctionSeries(int n,
-                                               double startValue,
-                                               double steadyValueDeviationPercentage,
-                                               double steadyPeriodSlope,
-                                               int steadyPeriodMinSize,
-                                               int steadyPeriodMaxSize,
-                                               double stepChangePercentage,
-                                               boolean upwardStep) {
-
-    StepFunctionMetricSeries metricSeries = new StepFunctionMetricSeries(startValue,
-      steadyValueDeviationPercentage,
-      steadyPeriodSlope,
-      steadyPeriodMinSize,
-      steadyPeriodMaxSize,
-      stepChangePercentage,
-      upwardStep);
-
-    return metricSeries.getSeries(n);
-  }
-
-  /**
-   * Returns a steady series with a short turbulent period, after which it returns to the steady state.
-   *
-   * @param n                                        size of the data series
-   * @param steadyStateValue                         steady state center value
-   * @param steadyStateDeviationPercentage           steady state deviation in percentage
-   * @param turbulentPeriodDeviationLowerPercentage  lower limit of the turbulent band, as a fraction above the centre value.
-   * @param turbulentPeriodDeviationHigherPercentage higher limit of the turbulent band, as a fraction above the centre value.
-   * @param turbulentPeriodLength                    turbulent period length (number of points)
-   * @param turbulentStatePosition                   Where the turbulent period occurs: 0 = at the beginning, 1 = in the middle (starting 25%-50% into the series), 2 = at the end of the series.
-   * @return steady series with a turbulent period
-   */
-  public static double[] getSteadySeriesWithTurbulentPeriod(int n,
-                                                            double steadyStateValue,
-                                                            double steadyStateDeviationPercentage,
-                                                            double turbulentPeriodDeviationLowerPercentage,
-                                                            double turbulentPeriodDeviationHigherPercentage,
-                                                            int turbulentPeriodLength,
-                                                            int turbulentStatePosition
-  ) {
-
-
-    SteadyWithTurbulenceMetricSeries metricSeries = new SteadyWithTurbulenceMetricSeries(n,
-      steadyStateValue,
-      steadyStateDeviationPercentage,
-      turbulentPeriodDeviationLowerPercentage,
-      turbulentPeriodDeviationHigherPercentage,
-      turbulentPeriodLength,
-      turbulentStatePosition);
-
-    return metricSeries.getSeries(n);
-  }
-
-
-  public static double[] generateSeries(String type, int n, Map<String, String> configs) {
-
-    double[] series;
-    switch (type) {
-
-      case "normal":
-        series = createNormalSeries(n,
-          Double.parseDouble(configs.getOrDefault("mean", "0")),
-          Double.parseDouble(configs.getOrDefault("sd", "1")),
-          Double.parseDouble(configs.getOrDefault("outlierProbability", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationSDTimesLower", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationSDTimesHigher", "0")),
-          Boolean.parseBoolean(configs.getOrDefault("outlierOnRightEnd", "true")));
-        break;
-
-      case "uniform":
-        series = createUniformSeries(n,
-          Double.parseDouble(configs.getOrDefault("value", "10")),
-          Double.parseDouble(configs.getOrDefault("deviationPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierProbability", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationLowerPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationHigherPercentage", "0")),
-          Boolean.parseBoolean(configs.getOrDefault("outliersAboveValue", "true")));
-        break;
-
-      case "monotonic":
-        series = createMonotonicSeries(n,
-          Double.parseDouble(configs.getOrDefault("startValue", "10")),
-          Double.parseDouble(configs.getOrDefault("slope", "0")),
-          Double.parseDouble(configs.getOrDefault("deviationPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierProbability", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationLowerPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationHigherPercentage", "0")),
-          Boolean.parseBoolean(configs.getOrDefault("outliersAboveValue", "true")));
-        break;
-
-      case "dualband":
-        series = getDualBandSeries(n,
-          Double.parseDouble(configs.getOrDefault("lowBandValue", "10")),
-          Double.parseDouble(configs.getOrDefault("lowBandDeviationPercentage", "0")),
-          Integer.parseInt(configs.getOrDefault("lowBandPeriodSize", "0")),
-          Double.parseDouble(configs.getOrDefault("highBandValue", "10")),
-          Double.parseDouble(configs.getOrDefault("highBandDeviationPercentage", "0")),
-          Integer.parseInt(configs.getOrDefault("highBandPeriodSize", "0")));
-        break;
-
-      case "step":
-        series = getStepFunctionSeries(n,
-          Double.parseDouble(configs.getOrDefault("startValue", "10")),
-          Double.parseDouble(configs.getOrDefault("steadyValueDeviationPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("steadyPeriodSlope", "0")),
-          Integer.parseInt(configs.getOrDefault("steadyPeriodMinSize", "0")),
-          Integer.parseInt(configs.getOrDefault("steadyPeriodMaxSize", "0")),
-          Double.parseDouble(configs.getOrDefault("stepChangePercentage", "0")),
-          Boolean.parseBoolean(configs.getOrDefault("upwardStep", "true")));
-        break;
-
-      case "turbulence":
-        series = getSteadySeriesWithTurbulentPeriod(n,
-          Double.parseDouble(configs.getOrDefault("steadyStateValue", "10")),
-          Double.parseDouble(configs.getOrDefault("steadyStateDeviationPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("turbulentPeriodDeviationLowerPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("turbulentPeriodDeviationHigherPercentage", "10")),
-          Integer.parseInt(configs.getOrDefault("turbulentPeriodLength", "0")),
-          Integer.parseInt(configs.getOrDefault("turbulentStatePosition", "0")));
-        break;
-
-      default:
-        series = createNormalSeries(n,
-          0,
-          1,
-          0,
-          0,
-          0,
-          true);
-    }
-    return series;
-  }
-
-  public static AbstractMetricSeries generateSeries(String type, Map<String, String> configs) {
-
-    AbstractMetricSeries series;
-    switch (type) {
-
-      case "normal":
-        series = new NormalMetricSeries(Double.parseDouble(configs.getOrDefault("mean", "0")),
-          Double.parseDouble(configs.getOrDefault("sd", "1")),
-          Double.parseDouble(configs.getOrDefault("outlierProbability", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationSDTimesLower", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationSDTimesHigher", "0")),
-          Boolean.parseBoolean(configs.getOrDefault("outlierOnRightEnd", "true")));
-        break;
-
-      case "uniform":
-        series = new UniformMetricSeries(
-          Double.parseDouble(configs.getOrDefault("value", "10")),
-          Double.parseDouble(configs.getOrDefault("deviationPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierProbability", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationLowerPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationHigherPercentage", "0")),
-          Boolean.parseBoolean(configs.getOrDefault("outliersAboveValue", "true")));
-        break;
-
-      case "monotonic":
-        series = new MonotonicMetricSeries(
-          Double.parseDouble(configs.getOrDefault("startValue", "10")),
-          Double.parseDouble(configs.getOrDefault("slope", "0")),
-          Double.parseDouble(configs.getOrDefault("deviationPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierProbability", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationLowerPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("outlierDeviationHigherPercentage", "0")),
-          Boolean.parseBoolean(configs.getOrDefault("outliersAboveValue", "true")));
-        break;
-
-      case "dualband":
-        series = new DualBandMetricSeries(
-          Double.parseDouble(configs.getOrDefault("lowBandValue", "10")),
-          Double.parseDouble(configs.getOrDefault("lowBandDeviationPercentage", "0")),
-          Integer.parseInt(configs.getOrDefault("lowBandPeriodSize", "0")),
-          Double.parseDouble(configs.getOrDefault("highBandValue", "10")),
-          Double.parseDouble(configs.getOrDefault("highBandDeviationPercentage", "0")),
-          Integer.parseInt(configs.getOrDefault("highBandPeriodSize", "0")));
-        break;
-
-      case "step":
-        series = new StepFunctionMetricSeries(
-          Double.parseDouble(configs.getOrDefault("startValue", "10")),
-          Double.parseDouble(configs.getOrDefault("steadyValueDeviationPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("steadyPeriodSlope", "0")),
-          Integer.parseInt(configs.getOrDefault("steadyPeriodMinSize", "0")),
-          Integer.parseInt(configs.getOrDefault("steadyPeriodMaxSize", "0")),
-          Double.parseDouble(configs.getOrDefault("stepChangePercentage", "0")),
-          Boolean.parseBoolean(configs.getOrDefault("upwardStep", "true")));
-        break;
-
-      case "turbulence":
-        series = new SteadyWithTurbulenceMetricSeries(
-          Integer.parseInt(configs.getOrDefault("approxSeriesLength", "100")),
-          Double.parseDouble(configs.getOrDefault("steadyStateValue", "10")),
-          Double.parseDouble(configs.getOrDefault("steadyStateDeviationPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("turbulentPeriodDeviationLowerPercentage", "0")),
-          Double.parseDouble(configs.getOrDefault("turbulentPeriodDeviationHigherPercentage", "10")),
-          Integer.parseInt(configs.getOrDefault("turbulentPeriodLength", "0")),
-          Integer.parseInt(configs.getOrDefault("turbulentStatePosition", "0")));
-        break;
-
-      default:
-        series = new NormalMetricSeries(0,
-          1,
-          0,
-          0,
-          0,
-          true);
-    }
-    return series;
-  }
-
-}
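A minimal illustrative sketch of driving the removed factory through its config-map overload (hypothetical driver class; the keys and defaults are the ones read by generateSeries() above, and unknown keys simply fall back to those defaults):

// Illustrative only: hypothetical driver for the removed MetricSeriesGeneratorFactory.
import java.util.HashMap;
import java.util.Map;
import org.apache.ambari.metrics.adservice.seriesgenerator.MetricSeriesGeneratorFactory;

public class FactoryExample {
  public static void main(String[] args) {
    // Keys mirror the "normal" case of the switch in generateSeries() above.
    Map<String, String> configs = new HashMap<>();
    configs.put("mean", "50");
    configs.put("sd", "5");
    configs.put("outlierProbability", "0.05");
    configs.put("outlierDeviationSDTimesLower", "3");
    configs.put("outlierDeviationSDTimesHigher", "5");
    double[] series = MetricSeriesGeneratorFactory.generateSeries("normal", 100, configs);
    System.out.println("Generated " + series.length + " points");
  }
}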
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/MetricSeriesGeneratorTest.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/MetricSeriesGeneratorTest.java
deleted file mode 100644
index 03537e4..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/MetricSeriesGeneratorTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.seriesgenerator;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-public class MetricSeriesGeneratorTest {
-
-  @Test
-  public void testUniformSeries() {
-
-    UniformMetricSeries metricSeries = new UniformMetricSeries(5, 0.2, 0, 0, 0, true);
-    Assert.assertTrue(metricSeries.nextValue() <= 6 && metricSeries.nextValue() >= 4);
-
-    double[] uniformSeries = MetricSeriesGeneratorFactory.createUniformSeries(50, 10, 0.2, 0.1, 0.4, 0.5, true);
-    Assert.assertTrue(uniformSeries.length == 50);
-
-    for (int i = 0; i < uniformSeries.length; i++) {
-      double value = uniformSeries[i];
-
-      if (value > 10 * 1.2) {
-        Assert.assertTrue(value >= 10 * 1.4 && value <= 10 * 1.6);
-      } else {
-        Assert.assertTrue(value >= 10 * 0.8 && value <= 10 * 1.2);
-      }
-    }
-  }
-
-  @Test
-  public void testNormalSeries() {
-    NormalMetricSeries metricSeries = new NormalMetricSeries(0, 1, 0, 0, 0, true);
-    Assert.assertTrue(metricSeries.nextValue() <= 3 && metricSeries.nextValue() >= -3);
-  }
-
-  @Test
-  public void testMonotonicSeries() {
-
-    MonotonicMetricSeries metricSeries = new MonotonicMetricSeries(0, 0.5, 0, 0, 0, 0, true);
-    Assert.assertTrue(metricSeries.nextValue() == 0);
-    Assert.assertTrue(metricSeries.nextValue() == 0.5);
-
-    double[] incSeries = MetricSeriesGeneratorFactory.createMonotonicSeries(20, 0, 0.5, 0, 0, 0, 0, true);
-    Assert.assertTrue(incSeries.length == 20);
-    for (int i = 0; i < incSeries.length; i++) {
-      Assert.assertTrue(incSeries[i] == i * 0.5);
-    }
-  }
-
-  @Test
-  public void testDualBandSeries() {
-    double[] dualBandSeries = MetricSeriesGeneratorFactory.getDualBandSeries(30, 5, 0.2, 5, 15, 0.3, 4);
-    Assert.assertTrue(dualBandSeries[0] >= 4 && dualBandSeries[0] <= 6);
-    Assert.assertTrue(dualBandSeries[4] >= 4 && dualBandSeries[4] <= 6);
-    Assert.assertTrue(dualBandSeries[5] >= 10.5 && dualBandSeries[5] <= 19.5);
-    Assert.assertTrue(dualBandSeries[8] >= 10.5 && dualBandSeries[8] <= 19.5);
-    Assert.assertTrue(dualBandSeries[9] >= 4 && dualBandSeries[9] <= 6);
-  }
-
-  @Test
-  public void testStepSeries() {
-    double[] stepSeries = MetricSeriesGeneratorFactory.getStepFunctionSeries(30, 10, 0, 0, 5, 5, 0.5, true);
-
-    Assert.assertTrue(stepSeries[0] == 10);
-    Assert.assertTrue(stepSeries[4] == 10);
-
-    Assert.assertTrue(stepSeries[5] == 10*1.5);
-    Assert.assertTrue(stepSeries[9] == 10*1.5);
-
-    Assert.assertTrue(stepSeries[10] == 10*1.5*1.5);
-    Assert.assertTrue(stepSeries[14] == 10*1.5*1.5);
-  }
-
-  @Test
-  public void testSteadySeriesWithTurbulence() {
-    double[] steadySeriesWithTurbulence = MetricSeriesGeneratorFactory.getSteadySeriesWithTurbulentPeriod(30, 5, 0, 1, 1, 5, 1);
-
-    int count = 0;
-    for (int i = 0; i < steadySeriesWithTurbulence.length; i++) {
-      if (steadySeriesWithTurbulence[i] == 10) {
-        count++;
-      }
-    }
-    Assert.assertTrue(count == 5);
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/MonotonicMetricSeries.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/MonotonicMetricSeries.java
deleted file mode 100644
index 8bd1a9b..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/MonotonicMetricSeries.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.seriesgenerator;
-
-import java.util.Random;
-
-public class MonotonicMetricSeries implements AbstractMetricSeries {
-
-  double startValue = 0.0;
-  double slope = 0.5;
-  double deviationPercentage = 0.0;
-  double outlierProbability = 0.0;
-  double outlierDeviationLowerPercentage = 0.0;
-  double outlierDeviationHigherPercentage = 0.0;
-  boolean outliersAboveValue = true;
-
-  Random random = new Random();
-  double nonOutlierProbability;
-
-  // y = mx + c
-  double y;
-  double m;
-  double x;
-  double c;
-
-  public MonotonicMetricSeries(double startValue,
-                               double slope,
-                               double deviationPercentage,
-                               double outlierProbability,
-                               double outlierDeviationLowerPercentage,
-                               double outlierDeviationHigherPercentage,
-                               boolean outliersAboveValue) {
-    this.startValue = startValue;
-    this.slope = slope;
-    this.deviationPercentage = deviationPercentage;
-    this.outlierProbability = outlierProbability;
-    this.outlierDeviationLowerPercentage = outlierDeviationLowerPercentage;
-    this.outlierDeviationHigherPercentage = outlierDeviationHigherPercentage;
-    this.outliersAboveValue = outliersAboveValue;
-    init();
-  }
-
-  private void init() {
-    y = startValue;
-    m = slope;
-    x = 1;
-    c = y - (m * x);
-    nonOutlierProbability = 1.0 - outlierProbability;
-  }
-
-  @Override
-  public double nextValue() {
-
-    double value;
-    double probability = random.nextDouble();
-
-    y = m * x + c;
-    if (probability <= nonOutlierProbability) {
-      double valueDeviationLowerLimit = y - deviationPercentage * y;
-      double valueDeviationHigherLimit = y + deviationPercentage * y;
-      value = valueDeviationLowerLimit + (valueDeviationHigherLimit - valueDeviationLowerLimit) * random.nextDouble();
-    } else {
-      if (outliersAboveValue) {
-        double outlierLowerLimit = y + outlierDeviationLowerPercentage * y;
-        double outlierUpperLimit = y + outlierDeviationHigherPercentage * y;
-        value = outlierLowerLimit + (outlierUpperLimit - outlierLowerLimit) * random.nextDouble();
-      } else {
-        double outlierLowerLimit = y - outlierDeviationLowerPercentage * y;
-        double outlierUpperLimit = y - outlierDeviationHigherPercentage * y;
-        value = outlierUpperLimit + (outlierLowerLimit - outlierUpperLimit) * random.nextDouble();
-      }
-    }
-    x++;
-    return value;
-  }
-
-  @Override
-  public double[] getSeries(int n) {
-    double[] series = new double[n];
-    for (int i = 0; i < n; i++) {
-      series[i] = nextValue();
-    }
-    return series;
-  }
-
-}
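With deviationPercentage = 0 and outlierProbability = 0, the monotonic generator above reduces to the exact line y = startValue + slope * i, which is the invariant MetricSeriesGeneratorTest.testMonotonicSeries relies on. A minimal sketch (hypothetical driver class):

// Illustrative only: hypothetical driver for the removed MonotonicMetricSeries test generator.
import org.apache.ambari.metrics.adservice.seriesgenerator.MonotonicMetricSeries;

public class MonotonicExample {
  public static void main(String[] args) {
    // No deviation and no outliers: values follow y = 0 + 0.5 * i exactly.
    MonotonicMetricSeries series = new MonotonicMetricSeries(0, 0.5, 0, 0, 0, 0, true);
    for (double v : series.getSeries(5)) {
      System.out.println(v); // 0.0, 0.5, 1.0, 1.5, 2.0
    }
  }
}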
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/NormalMetricSeries.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/NormalMetricSeries.java
deleted file mode 100644
index fdedb6e..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/NormalMetricSeries.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.seriesgenerator;
-
-import java.util.Random;
-
-public class NormalMetricSeries implements AbstractMetricSeries {
-
-  double mean = 0.0;
-  double sd = 1.0;
-  double outlierProbability = 0.0;
-  double outlierDeviationSDTimesLower = 0.0;
-  double outlierDeviationSDTimesHigher = 0.0;
-  boolean outlierOnRightEnd = true;
-
-  Random random = new Random();
-  double nonOutlierProbability;
-
-
-  public NormalMetricSeries(double mean,
-                            double sd,
-                            double outlierProbability,
-                            double outlierDeviationSDTimesLower,
-                            double outlierDeviationSDTimesHigher,
-                            boolean outlierOnRightEnd) {
-    this.mean = mean;
-    this.sd = sd;
-    this.outlierProbability = outlierProbability;
-    this.outlierDeviationSDTimesLower = outlierDeviationSDTimesLower;
-    this.outlierDeviationSDTimesHigher = outlierDeviationSDTimesHigher;
-    this.outlierOnRightEnd = outlierOnRightEnd;
-    init();
-  }
-
-  private void init() {
-    nonOutlierProbability = 1.0 - outlierProbability;
-  }
-
-  @Override
-  public double nextValue() {
-
-    double value;
-    double probability = random.nextDouble();
-
-    if (probability <= nonOutlierProbability) {
-      value = random.nextGaussian() * sd + mean;
-    } else {
-      if (outlierOnRightEnd) {
-        value = mean + (outlierDeviationSDTimesLower + (outlierDeviationSDTimesHigher - outlierDeviationSDTimesLower) * random.nextDouble()) * sd;
-      } else {
-        value = mean - (outlierDeviationSDTimesLower + (outlierDeviationSDTimesHigher - outlierDeviationSDTimesLower) * random.nextDouble()) * sd;
-      }
-    }
-    return value;
-  }
-
-  @Override
-  public double[] getSeries(int n) {
-    double[] series = new double[n];
-    for (int i = 0; i < n; i++) {
-      series[i] = nextValue();
-    }
-    return series;
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/SteadyWithTurbulenceMetricSeries.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/SteadyWithTurbulenceMetricSeries.java
deleted file mode 100644
index 403e599..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/SteadyWithTurbulenceMetricSeries.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.seriesgenerator;
-
-import java.util.Random;
-
-public class SteadyWithTurbulenceMetricSeries implements AbstractMetricSeries {
-
-  double steadyStateValue = 0.0;
-  double steadyStateDeviationPercentage = 0.0;
-  double turbulentPeriodDeviationLowerPercentage = 0.3;
-  double turbulentPeriodDeviationHigherPercentage = 0.5;
-  int turbulentPeriodLength = 5;
-  int turbulentStatePosition = 1;
-  int approximateSeriesLength = 10;
-
-  Random random = new Random();
-  double valueDeviationLowerLimit;
-  double valueDeviationHigherLimit;
-  double tPeriodLowerLimit;
-  double tPeriodUpperLimit;
-  int tPeriodStartIndex = 0;
-  int index = 0;
-
-  public SteadyWithTurbulenceMetricSeries(int approximateSeriesLength,
-                                          double steadyStateValue,
-                                          double steadyStateDeviationPercentage,
-                                          double turbulentPeriodDeviationLowerPercentage,
-                                          double turbulentPeriodDeviationHigherPercentage,
-                                          int turbulentPeriodLength,
-                                          int turbulentStatePosition) {
-    this.approximateSeriesLength = approximateSeriesLength;
-    this.steadyStateValue = steadyStateValue;
-    this.steadyStateDeviationPercentage = steadyStateDeviationPercentage;
-    this.turbulentPeriodDeviationLowerPercentage = turbulentPeriodDeviationLowerPercentage;
-    this.turbulentPeriodDeviationHigherPercentage = turbulentPeriodDeviationHigherPercentage;
-    this.turbulentPeriodLength = turbulentPeriodLength;
-    this.turbulentStatePosition = turbulentStatePosition;
-    init();
-  }
-
-  private void init() {
-
-    if (turbulentStatePosition == 1) {
-      tPeriodStartIndex = (int) (0.25 * approximateSeriesLength + (0.25 * approximateSeriesLength * random.nextDouble()));
-    } else if (turbulentStatePosition == 2) {
-      tPeriodStartIndex = approximateSeriesLength - turbulentPeriodLength;
-    }
-
-    valueDeviationLowerLimit = steadyStateValue - steadyStateDeviationPercentage * steadyStateValue;
-    valueDeviationHigherLimit = steadyStateValue + steadyStateDeviationPercentage * steadyStateValue;
-
-    tPeriodLowerLimit = steadyStateValue + turbulentPeriodDeviationLowerPercentage * steadyStateValue;
-    tPeriodUpperLimit = steadyStateValue + turbulentPeriodDeviationHigherPercentage * steadyStateValue;
-  }
-
-  @Override
-  public double nextValue() {
-
-    double value;
-
-    if (index >= tPeriodStartIndex && index <= (tPeriodStartIndex + turbulentPeriodLength)) {
-      value = tPeriodLowerLimit + (tPeriodUpperLimit - tPeriodLowerLimit) * random.nextDouble();
-    } else {
-      value = valueDeviationLowerLimit + (valueDeviationHigherLimit - valueDeviationLowerLimit) * random.nextDouble();
-    }
-    index++;
-    return value;
-  }
-
-  @Override
-  public double[] getSeries(int n) {
-
-    double[] series = new double[n];
-    int turbulentPeriodStartIndex = 0;
-
-    if (turbulentStatePosition == 1) {
-      turbulentPeriodStartIndex = (int) (0.25 * n + (0.25 * n * random.nextDouble()));
-    } else if (turbulentStatePosition == 2) {
-      turbulentPeriodStartIndex = n - turbulentPeriodLength;
-    }
-
-    double valueDevLowerLimit = steadyStateValue - steadyStateDeviationPercentage * steadyStateValue;
-    double valueDevHigherLimit = steadyStateValue + steadyStateDeviationPercentage * steadyStateValue;
-
-    double turbulentPeriodLowerLimit = steadyStateValue + turbulentPeriodDeviationLowerPercentage * steadyStateValue;
-    double turbulentPeriodUpperLimit = steadyStateValue + turbulentPeriodDeviationHigherPercentage * steadyStateValue;
-
-    for (int i = 0; i < n; i++) {
-      if (i >= turbulentPeriodStartIndex && i < (turbulentPeriodStartIndex + turbulentPeriodLength)) {
-        series[i] = turbulentPeriodLowerLimit + (turbulentPeriodUpperLimit - turbulentPeriodLowerLimit) * random.nextDouble();
-      } else {
-        series[i] = valueDevLowerLimit + (valueDevHigherLimit - valueDevLowerLimit) * random.nextDouble();
-      }
-    }
-
-    return series;
-  }
-
-}
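A minimal sketch of the turbulence placement logic implemented in getSeries() above (hypothetical driver class): turbulentStatePosition = 2 places the turbulent window in the last turbulentPeriodLength points of the requested series.

// Illustrative only: hypothetical driver for the removed SteadyWithTurbulenceMetricSeries generator.
import org.apache.ambari.metrics.adservice.seriesgenerator.SteadyWithTurbulenceMetricSeries;

public class TurbulenceExample {
  public static void main(String[] args) {
    // 30 steady points around 5 (+/-10%), with a 5-point turbulent window in [10, 15] at the end (position = 2).
    SteadyWithTurbulenceMetricSeries series =
        new SteadyWithTurbulenceMetricSeries(30, 5, 0.1, 1.0, 2.0, 5, 2);
    double[] values = series.getSeries(30);
    for (int i = 0; i < values.length; i++) {
      System.out.println(i + " -> " + values[i]); // indices 25-29 fall in the turbulent band
    }
  }
}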
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/StepFunctionMetricSeries.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/StepFunctionMetricSeries.java
deleted file mode 100644
index c91eac9..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/StepFunctionMetricSeries.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.seriesgenerator;
-
-import java.util.Random;
-
-public class StepFunctionMetricSeries implements AbstractMetricSeries {
-
-  double startValue = 0.0;
-  double steadyValueDeviationPercentage = 0.0;
-  double steadyPeriodSlope = 0.5;
-  int steadyPeriodMinSize = 10;
-  int steadyPeriodMaxSize = 20;
-  double stepChangePercentage = 0.0;
-  boolean upwardStep = true;
-
-  Random random = new Random();
-
-  // y = mx + c
-  double y;
-  double m;
-  double x;
-  double c;
-  int currentStepSize;
-  int currentIndex;
-
-  public StepFunctionMetricSeries(double startValue,
-                                  double steadyValueDeviationPercentage,
-                                  double steadyPeriodSlope,
-                                  int steadyPeriodMinSize,
-                                  int steadyPeriodMaxSize,
-                                  double stepChangePercentage,
-                                  boolean upwardStep) {
-    this.startValue = startValue;
-    this.steadyValueDeviationPercentage = steadyValueDeviationPercentage;
-    this.steadyPeriodSlope = steadyPeriodSlope;
-    this.steadyPeriodMinSize = steadyPeriodMinSize;
-    this.steadyPeriodMaxSize = steadyPeriodMaxSize;
-    this.stepChangePercentage = stepChangePercentage;
-    this.upwardStep = upwardStep;
-    init();
-  }
-
-  private void init() {
-    y = startValue;
-    m = steadyPeriodSlope;
-    x = 1;
-    c = y - (m * x);
-
-    currentStepSize = (int) (steadyPeriodMinSize + (steadyPeriodMaxSize - steadyPeriodMinSize) * random.nextDouble());
-    currentIndex = 0;
-  }
-
-  @Override
-  public double nextValue() {
-
-    double value = 0.0;
-
-    if (currentIndex < currentStepSize) {
-      y = m * x + c;
-      double valueDeviationLowerLimit = y - steadyValueDeviationPercentage * y;
-      double valueDeviationHigherLimit = y + steadyValueDeviationPercentage * y;
-      value = valueDeviationLowerLimit + (valueDeviationHigherLimit - valueDeviationLowerLimit) * random.nextDouble();
-      x++;
-      currentIndex++;
-    }
-
-    if (currentIndex == currentStepSize) {
-      currentIndex = 0;
-      currentStepSize = (int) (steadyPeriodMinSize + (steadyPeriodMaxSize - steadyPeriodMinSize) * random.nextDouble());
-      if (upwardStep) {
-        y = y + stepChangePercentage * y;
-      } else {
-        y = y - stepChangePercentage * y;
-      }
-      x = 1;
-      c = y - (m * x);
-    }
-
-    return value;
-  }
-
-  @Override
-  public double[] getSeries(int n) {
-    double[] series = new double[n];
-    for (int i = 0; i < n; i++) {
-      series[i] = nextValue();
-    }
-    return series;
-  }
-
-}
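With a zero slope and zero steady-state deviation, the step generator above produces flat plateaus that jump by stepChangePercentage after each steady period, which is exactly what MetricSeriesGeneratorTest.testStepSeries asserts. A minimal sketch (hypothetical driver class):

// Illustrative only: hypothetical driver for the removed StepFunctionMetricSeries generator.
import org.apache.ambari.metrics.adservice.seriesgenerator.StepFunctionMetricSeries;

public class StepFunctionExample {
  public static void main(String[] args) {
    // Flat five-point plateaus that step up by 50% each period: 10, ..., 15, ..., 22.5.
    StepFunctionMetricSeries series = new StepFunctionMetricSeries(10, 0, 0, 5, 5, 0.5, true);
    for (double v : series.getSeries(15)) {
      System.out.println(v);
    }
  }
}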
diff --git a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/UniformMetricSeries.java b/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/UniformMetricSeries.java
deleted file mode 100644
index 6122f82..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/java/org/apache/ambari/metrics/adservice/seriesgenerator/UniformMetricSeries.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.seriesgenerator;
-
-import java.util.Random;
-
-public class UniformMetricSeries implements AbstractMetricSeries {
-
-  double value = 0.0;
-  double deviationPercentage = 0.0;
-  double outlierProbability = 0.0;
-  double outlierDeviationLowerPercentage = 0.0;
-  double outlierDeviationHigherPercentage = 0.0;
-  boolean outliersAboveValue = true;
-
-  Random random = new Random();
-  double valueDeviationLowerLimit;
-  double valueDeviationHigherLimit;
-  double outlierLeftLowerLimit;
-  double outlierLeftHigherLimit;
-  double outlierRightLowerLimit;
-  double outlierRightUpperLimit;
-  double nonOutlierProbability;
-
-
-  public UniformMetricSeries(double value,
-                             double deviationPercentage,
-                             double outlierProbability,
-                             double outlierDeviationLowerPercentage,
-                             double outlierDeviationHigherPercentage,
-                             boolean outliersAboveValue) {
-    this.value = value;
-    this.deviationPercentage = deviationPercentage;
-    this.outlierProbability = outlierProbability;
-    this.outlierDeviationLowerPercentage = outlierDeviationLowerPercentage;
-    this.outlierDeviationHigherPercentage = outlierDeviationHigherPercentage;
-    this.outliersAboveValue = outliersAboveValue;
-    init();
-  }
-
-  private void init() {
-    valueDeviationLowerLimit = value - deviationPercentage * value;
-    valueDeviationHigherLimit = value + deviationPercentage * value;
-
-    outlierLeftLowerLimit = value - outlierDeviationHigherPercentage * value;
-    outlierLeftHigherLimit = value - outlierDeviationLowerPercentage * value;
-    outlierRightLowerLimit = value + outlierDeviationLowerPercentage * value;
-    outlierRightUpperLimit = value + outlierDeviationHigherPercentage * value;
-
-    nonOutlierProbability = 1.0 - outlierProbability;
-  }
-
-  @Override
-  public double nextValue() {
-
-    double value;
-    double probability = random.nextDouble();
-
-    if (probability <= nonOutlierProbability) {
-      value = valueDeviationLowerLimit + (valueDeviationHigherLimit - valueDeviationLowerLimit) * random.nextDouble();
-    } else {
-      if (!outliersAboveValue) {
-        value = outlierLeftLowerLimit + (outlierLeftHigherLimit - outlierLeftLowerLimit) * random.nextDouble();
-      } else {
-        value = outlierRightLowerLimit + (outlierRightUpperLimit - outlierRightLowerLimit) * random.nextDouble();
-      }
-    }
-    return value;
-  }
-
-  @Override
-  public double[] getSeries(int n) {
-    double[] series = new double[n];
-    for (int i = 0; i < n; i++) {
-      series[i] = nextValue();
-    }
-    return series;
-  }
-
-}
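A minimal sketch mirroring testUniformSeries above (hypothetical driver class): regular values stay within the deviation band around the centre value, and with outliersAboveValue = true any outlier lands in the band defined by the lower and higher outlier fractions above it.

// Illustrative only: hypothetical driver for the removed UniformMetricSeries generator.
import org.apache.ambari.metrics.adservice.seriesgenerator.UniformMetricSeries;

public class UniformExample {
  public static void main(String[] args) {
    // Centered on 10 (+/-20%); roughly 10% of points are outliers 40%-60% above 10, i.e. in [14, 16].
    UniformMetricSeries series = new UniformMetricSeries(10, 0.2, 0.1, 0.4, 0.6, true);
    for (double v : series.getSeries(20)) {
      System.out.println(v);
    }
  }
}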
diff --git a/ambari-metrics-anomaly-detection-service/src/test/resources/config.yaml b/ambari-metrics-anomaly-detection-service/src/test/resources/config.yaml
deleted file mode 100644
index 6b09499..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/resources/config.yaml
+++ /dev/null
@@ -1,35 +0,0 @@
-#Licensed under the Apache License, Version 2.0 (the "License");
-#you may not use this file except in compliance with the License.
-#You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-#Unless required by applicable law or agreed to in writing, software
-#distributed under the License is distributed on an "AS IS" BASIS,
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#See the License for the specific language governing permissions and
-#limitations under the License.
-
-metricDefinitionService:
-  inputDefinitionDirectory: /etc/ambari-metrics-anomaly-detection/conf/definitionDirectory
-
-metricsCollector:
-  hosts: host1,host2
-  port: 6188
-  protocol: http
-  metadataEndpoint: /ws/v1/timeline/metrics/metadata/key
-
-adQueryService:
-  anomalyDataTtl: 604800
-
-metricDefinitionDB:
-  # force checksum verification of all data that is read from the file system on behalf of a particular read
-  verifyChecksums: true
-  # raise an error as soon as it detects an internal corruption
-  performParanoidChecks: false
-  # Path to Level DB directory
-  dbDirPath: /var/lib/ambari-metrics-anomaly-detection/
-
-spark:
-  mode: standalone
-  masterHostPort: localhost:7077
\ No newline at end of file
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionAppConfigTest.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionAppConfigTest.scala
deleted file mode 100644
index 76391a0..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/AnomalyDetectionAppConfigTest.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.app
-
-import java.io.File
-import java.net.URL
-
-import javax.validation.Validator
-
-import org.scalatest.FunSuite
-
-import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.datatype.guava.GuavaModule
-
-import io.dropwizard.configuration.YamlConfigurationFactory
-import io.dropwizard.jersey.validation.Validators
-
-class AnomalyDetectionAppConfigTest extends FunSuite {
-
-  test("testConfiguration") {
-
-    val classLoader = getClass.getClassLoader
-    val url: URL = classLoader.getResource("config.yaml")
-    val file = new File(url.getFile)
-
-    val objectMapper: ObjectMapper = new ObjectMapper()
-    objectMapper.registerModule(new GuavaModule)
-    val validator: Validator = Validators.newValidator
-    val factory: YamlConfigurationFactory[AnomalyDetectionAppConfig] =
-      new YamlConfigurationFactory[AnomalyDetectionAppConfig](classOf[AnomalyDetectionAppConfig], validator, objectMapper, "")
-    val config = factory.build(file)
-
-    assert(config.isInstanceOf[AnomalyDetectionAppConfig])
-
-    assert(config.getMetricDefinitionServiceConfiguration.getInputDefinitionDirectory ==
-      "/etc/ambari-metrics-anomaly-detection/conf/definitionDirectory")
-
-    assert(config.getMetricCollectorConfiguration.getHosts == "host1,host2")
-    assert(config.getMetricCollectorConfiguration.getPort == "6188")
-
-    assert(config.getAdServiceConfiguration.getAnomalyDataTtl == 604800)
-
-    assert(config.getMetricDefinitionDBConfiguration.getDbDirPath == "/var/lib/ambari-metrics-anomaly-detection/")
-    assert(config.getMetricDefinitionDBConfiguration.getVerifyChecksums)
-    assert(!config.getMetricDefinitionDBConfiguration.getPerformParanoidChecks)
-
-    assert(config.getSparkConfiguration.getMode.equals("standalone"))
-    assert(config.getSparkConfiguration.getMasterHostPort.equals("localhost:7077"))
-
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/DefaultADResourceSpecTest.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/DefaultADResourceSpecTest.scala
deleted file mode 100644
index 7330ff9..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/DefaultADResourceSpecTest.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.app
-
-import java.time.LocalDateTime
-
-import javax.ws.rs.client.Client
-import javax.ws.rs.core.MediaType.APPLICATION_JSON
-
-import org.apache.ambari.metrics.adservice.app.DropwizardAppRuleHelper.withAppRunning
-import org.glassfish.jersey.client.ClientProperties.{CONNECT_TIMEOUT, READ_TIMEOUT}
-import org.glassfish.jersey.client.{ClientConfig, JerseyClientBuilder}
-import org.glassfish.jersey.filter.LoggingFilter
-import org.glassfish.jersey.jaxb.internal.XmlJaxbElementProvider
-import org.joda.time.DateTime
-import org.scalatest.{FunSpec, Matchers}
-
-import com.google.common.io.Resources
-
-class DefaultADResourceSpecTest extends FunSpec with Matchers {
-
-  describe("/anomaly") {
-    it("Must return default message") {
-      withAppRunning(classOf[AnomalyDetectionApp], Resources.getResource("config.yaml").getPath) { rule =>
-        val json = client.target(s"http://localhost:${rule.getLocalPort}/anomaly")
-          .request().accept(APPLICATION_JSON).buildGet().invoke(classOf[String])
-        val dtf = java.time.format.DateTimeFormatter.ofPattern("yyyy/MM/dd HH:mm")
-        val now = LocalDateTime.now
-        assert(json == "{\"message\":\"Anomaly Detection Service!\"," + "\"today\":\"" + now + "\"}")
-      }
-    }
-  }
-
-  def client: Client = {
-    val config = new ClientConfig()
-    config.register(classOf[LoggingFilter])
-    config.register(classOf[XmlJaxbElementProvider.App])
-    config.property(CONNECT_TIMEOUT, 5000)
-    config.property(READ_TIMEOUT, 10000)
-    JerseyClientBuilder.createClient(config)
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/DropwizardAppRuleHelper.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/DropwizardAppRuleHelper.scala
deleted file mode 100644
index 6017bb4..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/DropwizardAppRuleHelper.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.app
-
-import org.junit.runner.Description
-
-import io.dropwizard.Configuration
-import io.dropwizard.testing.ConfigOverride
-import io.dropwizard.testing.junit.DropwizardAppRule
-
-import scala.collection.mutable
-
-object DropwizardAppRuleHelper {
-
-  def withAppRunning[C <: Configuration](serviceClass: Class[_ <: io.dropwizard.Application[C]],
-                                         configPath: String, configOverrides: ConfigOverride*)
-                                        (fn: (DropwizardAppRule[C]) => Unit) {
-    val overrides = new mutable.ListBuffer[ConfigOverride]
-    configOverrides.foreach { o => overrides += o }
-    val rule = new DropwizardAppRule(serviceClass, configPath, overrides.toList: _*)
-    rule.apply(() => fn(rule), Description.EMPTY).evaluate()
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/DropwizardResourceTestRuleHelper.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/DropwizardResourceTestRuleHelper.scala
deleted file mode 100644
index f896db4..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/app/DropwizardResourceTestRuleHelper.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.app
-
-import org.junit.runner.Description
-
-import io.dropwizard.testing.junit.ResourceTestRule
-
-object DropwizardResourceTestRuleHelper {
-  def withResourceTestRule(configBlock: (ResourceTestRule.Builder) => Unit)(testBlock: (ResourceTestRule) => Unit) {
-    val builder = new ResourceTestRule.Builder()
-    configBlock(builder)
-    val rule = builder.build()
-    rule.apply(() => {
-      testBlock(rule)
-    }, Description.EMPTY).evaluate()
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/db/PhoenixAnomalyStoreAccessorTest.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/db/PhoenixAnomalyStoreAccessorTest.scala
deleted file mode 100644
index 142e98a..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/db/PhoenixAnomalyStoreAccessorTest.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.metrics.adservice.db
-
-import org.scalatest.FunSuite
-
-class PhoenixAnomalyStoreAccessorTest extends FunSuite {
-
-  test("testInitAnomalyMetricSchema") {
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/leveldb/LevelDBDataSourceTest.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/leveldb/LevelDBDataSourceTest.scala
deleted file mode 100644
index 9757d76..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/leveldb/LevelDBDataSourceTest.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package org.apache.ambari.metrics.adservice.leveldb
-
-import java.io.File
-
-import org.apache.ambari.metrics.adservice.app.AnomalyDetectionAppConfig
-import org.apache.ambari.metrics.adservice.configuration.MetricDefinitionDBConfiguration
-import org.iq80.leveldb.util.FileUtils
-import org.mockito.Mockito.when
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-import org.scalatest.mockito.MockitoSugar
-
-class LevelDBDataSourceTest extends FunSuite with BeforeAndAfter with Matchers with MockitoSugar {
-
-  var db: LevelDBDataSource = _
-  var file : File = FileUtils.createTempDir("adservice-leveldb-test")
-
-  before {
-    val appConfig: AnomalyDetectionAppConfig = mock[AnomalyDetectionAppConfig]
-    val mdConfig : MetricDefinitionDBConfiguration = mock[MetricDefinitionDBConfiguration]
-
-    when(appConfig.getMetricDefinitionDBConfiguration).thenReturn(mdConfig)
-    when(mdConfig.getVerifyChecksums).thenReturn(true)
-    when(mdConfig.getPerformParanoidChecks).thenReturn(false)
-    when(mdConfig.getDbDirPath).thenReturn(file.getAbsolutePath)
-
-    db = new LevelDBDataSource(appConfig)
-    db.initialize()
-  }
-
-  test("testOperations") {
-    db.put("Hello".getBytes(), "World".getBytes())
-    assert(db.get("Hello".getBytes()).get.sameElements("World".getBytes()))
-    db.update(Seq("Hello".getBytes()), Seq(("Hello".getBytes(), "Mars".getBytes())))
-    assert(db.get("Hello".getBytes()).get.sameElements("Mars".getBytes()))
-  }
-
-  after {
-    FileUtils.deleteRecursively(file)
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/metadata/AMSMetadataProviderTest.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/metadata/AMSMetadataProviderTest.scala
deleted file mode 100644
index 79366b1..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/metadata/AMSMetadataProviderTest.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import java.util
-
-import org.apache.ambari.metrics.adservice.configuration.MetricCollectorConfiguration
-import org.scalatest.FunSuite
-
-class AMSMetadataProviderTest extends FunSuite {
-
-  test("testFromTimelineMetricKey") {
-    val timelineMetricKeys: java.util.Set[java.util.Map[String, String]] = new java.util.HashSet[java.util.Map[String, String]]()
-
-    val uuid: Array[Byte] = Array.empty[Byte]
-
-    for (i <- 1 to 3) {
-      val keyMap: java.util.Map[String, String] = new util.HashMap[String, String]()
-      keyMap.put("metricName", "M" + i)
-      keyMap.put("appId", "App")
-      keyMap.put("hostname", "H")
-      keyMap.put("uuid", new String(uuid))
-      timelineMetricKeys.add(keyMap)
-    }
-
-    val aMSMetadataProvider : ADMetadataProvider = new ADMetadataProvider(new MetricCollectorConfiguration)
-
-    val metricKeys : Set[MetricKey] = aMSMetadataProvider.getMetricKeys(timelineMetricKeys)
-    assert(metricKeys.size == 3)
-  }
-
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinitionServiceTest.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinitionServiceTest.scala
deleted file mode 100644
index d3454f2..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/metadata/MetricDefinitionServiceTest.scala
+++ /dev/null
@@ -1,130 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements.  See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership.  The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License.  You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import org.apache.ambari.metrics.adservice.app.AnomalyDetectionAppConfig
-import org.apache.ambari.metrics.adservice.db.AdMetadataStoreAccessor
-import org.easymock.EasyMock.{anyObject, expect, expectLastCall, replay}
-import org.scalatest.FunSuite
-import org.scalatest.easymock.EasyMockSugar
-
-class MetricDefinitionServiceTest extends FunSuite {
-
-  test("testAddDefinition") {
-
-    val definitions : scala.collection.mutable.MutableList[MetricSourceDefinition] = scala.collection.mutable.MutableList.empty[MetricSourceDefinition]
-
-    for (i <- 1 to 3) {
-      val msd1 : MetricSourceDefinition = new MetricSourceDefinition("TestDefinition" + i, "testAppId", MetricSourceDefinitionType.API)
-      definitions.+=(msd1)
-    }
-
-    val newDef : MetricSourceDefinition = new MetricSourceDefinition("NewDefinition", "testAppId", MetricSourceDefinitionType.API)
-
-    val adMetadataStoreAccessor: AdMetadataStoreAccessor = EasyMockSugar.niceMock[AdMetadataStoreAccessor]
-    expect(adMetadataStoreAccessor.getSavedInputDefinitions).andReturn(definitions.toList).once()
-    expect(adMetadataStoreAccessor.saveInputDefinition(newDef)).andReturn(true).once()
-    replay(adMetadataStoreAccessor)
-
-    val metricDefinitionService: MetricDefinitionServiceImpl = new MetricDefinitionServiceImpl(new AnomalyDetectionAppConfig, adMetadataStoreAccessor)
-
-    metricDefinitionService.setAdMetadataStoreAccessor(adMetadataStoreAccessor)
-
-    metricDefinitionService.addDefinition(newDef)
-
-    assert(metricDefinitionService.metricSourceDefinitionMap.size == 4)
-    assert(metricDefinitionService.metricSourceDefinitionMap.get("testDefinition") != null)
-  }
-
-  test("testGetDefinitionByName") {
-    val definitions : scala.collection.mutable.MutableList[MetricSourceDefinition] = scala.collection.mutable.MutableList.empty[MetricSourceDefinition]
-
-    for (i <- 1 to 3) {
-      val msd1 : MetricSourceDefinition = new MetricSourceDefinition("TestDefinition" + i, "testAppId", MetricSourceDefinitionType.API)
-      definitions.+=(msd1)
-    }
-
-    val adMetadataStoreAccessor: AdMetadataStoreAccessor = EasyMockSugar.niceMock[AdMetadataStoreAccessor]
-    expect(adMetadataStoreAccessor.getSavedInputDefinitions).andReturn(definitions.toList).once()
-    replay(adMetadataStoreAccessor)
-
-    val metricDefinitionService: MetricDefinitionServiceImpl = new MetricDefinitionServiceImpl(new AnomalyDetectionAppConfig, adMetadataStoreAccessor)
-
-    metricDefinitionService.setAdMetadataStoreAccessor(adMetadataStoreAccessor)
-    for (i <- 1 to 3) {
-      val definition: MetricSourceDefinition = metricDefinitionService.getDefinitionByName("TestDefinition" + i)
-      assert(definition != null)
-    }
-  }
-
-  test("testGetDefinitionByAppId") {
-    val definitions : scala.collection.mutable.MutableList[MetricSourceDefinition] = scala.collection.mutable.MutableList.empty[MetricSourceDefinition]
-
-    for (i <- 1 to 3) {
-      var msd1 : MetricSourceDefinition = null
-      if (i == 2) {
-        msd1 = new MetricSourceDefinition("TestDefinition" + i, null, MetricSourceDefinitionType.API)
-      } else {
-        msd1 = new MetricSourceDefinition("TestDefinition" + i, "testAppId", MetricSourceDefinitionType.API)
-      }
-      definitions.+=(msd1)
-    }
-
-    val adMetadataStoreAccessor: AdMetadataStoreAccessor = EasyMockSugar.niceMock[AdMetadataStoreAccessor]
-    expect(adMetadataStoreAccessor.getSavedInputDefinitions).andReturn(definitions.toList).once()
-    replay(adMetadataStoreAccessor)
-
-    val metricDefinitionService: MetricDefinitionServiceImpl = new MetricDefinitionServiceImpl(new AnomalyDetectionAppConfig, adMetadataStoreAccessor)
-
-    metricDefinitionService.setAdMetadataStoreAccessor(adMetadataStoreAccessor)
-    val definitionsByAppId: List[MetricSourceDefinition] = metricDefinitionService.getDefinitionByAppId("testAppId")
-    assert(definitionsByAppId.size == 2)
-  }
-
-  test("testDeleteDefinitionByName") {
-    val definitions : scala.collection.mutable.MutableList[MetricSourceDefinition] = scala.collection.mutable.MutableList.empty[MetricSourceDefinition]
-
-    for (i <- 1 to 3) {
-      var msd1 : MetricSourceDefinition = null
-      if (i == 2) {
-        msd1 = new MetricSourceDefinition("TestDefinition" + i, null, MetricSourceDefinitionType.CONFIG)
-      } else {
-        msd1 = new MetricSourceDefinition("TestDefinition" + i, "testAppId", MetricSourceDefinitionType.API)
-      }
-      definitions.+=(msd1)
-    }
-
-    val adMetadataStoreAccessor: AdMetadataStoreAccessor = EasyMockSugar.niceMock[AdMetadataStoreAccessor]
-    expect(adMetadataStoreAccessor.getSavedInputDefinitions).andReturn(definitions.toList).once()
-    expect(adMetadataStoreAccessor.removeInputDefinition(anyObject[String])).andReturn(true).times(2)
-    replay(adMetadataStoreAccessor)
-
-    val metricDefinitionService: MetricDefinitionServiceImpl = new MetricDefinitionServiceImpl(new AnomalyDetectionAppConfig, adMetadataStoreAccessor)
-
-    metricDefinitionService.setAdMetadataStoreAccessor(adMetadataStoreAccessor)
-
-    var success: Boolean = metricDefinitionService.deleteDefinitionByName("TestDefinition1")
-    assert(success)
-    success = metricDefinitionService.deleteDefinitionByName("TestDefinition2")
-    assert(!success)
-    success = metricDefinitionService.deleteDefinitionByName("TestDefinition3")
-    assert(success)
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/metadata/MetricSourceDefinitionTest.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/metadata/MetricSourceDefinitionTest.scala
deleted file mode 100644
index c4d4dbc..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/metadata/MetricSourceDefinitionTest.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.metadata
-
-import org.apache.commons.lang.SerializationUtils
-import org.scalatest.FunSuite
-
-import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
-import org.apache.ambari.metrics.adservice.app.ADServiceScalaModule
-
-class MetricSourceDefinitionTest extends FunSuite {
-
-  test("createNewMetricSourceDefinition") {
-    val msd : MetricSourceDefinition = new MetricSourceDefinition("testDefinition", "testAppId", MetricSourceDefinitionType.API)
-
-    assert(msd.definitionName == "testDefinition")
-    assert(msd.appId == "testAppId")
-    assert(msd.definitionSource == MetricSourceDefinitionType.API)
-
-    assert(msd.hosts.isEmpty)
-    assert(msd.metricDefinitions.isEmpty)
-    assert(msd.associatedAnomalySubsystems.isEmpty)
-    assert(msd.relatedDefinitions.isEmpty)
-  }
-
-  test("testAddMetricDefinition") {
-    val msd : MetricSourceDefinition = new MetricSourceDefinition("testDefinition", "testAppId", MetricSourceDefinitionType.API)
-    assert(msd.metricDefinitions.isEmpty)
-
-    msd.addMetricDefinition(MetricDefinition("TestMetric", "TestApp", List.empty[String]))
-    assert(msd.metricDefinitions.nonEmpty)
-  }
-
-  test("testEquals") {
-    val msd1 : MetricSourceDefinition = new MetricSourceDefinition("testDefinition", "testAppId", MetricSourceDefinitionType.API)
-    val msd2 : MetricSourceDefinition = new MetricSourceDefinition("testDefinition", "testAppId2", MetricSourceDefinitionType.API)
-    assert(msd1 == msd2)
-
-    val msd3 : MetricSourceDefinition = new MetricSourceDefinition("testDefinition1", "testAppId", MetricSourceDefinitionType.API)
-    val msd4 : MetricSourceDefinition = new MetricSourceDefinition("testDefinition2", "testAppId2", MetricSourceDefinitionType.API)
-    assert(msd3 != msd4)
-  }
-
-  test("testRemoveMetricDefinition") {
-    val msd : MetricSourceDefinition = new MetricSourceDefinition("testDefinition", "testAppId", MetricSourceDefinitionType.API)
-    assert(msd.metricDefinitions.isEmpty)
-
-    msd.addMetricDefinition(MetricDefinition("TestMetric", "TestApp", List.empty[String]))
-    assert(msd.metricDefinitions.nonEmpty)
-
-    msd.removeMetricDefinition(MetricDefinition("TestMetric", "TestApp", List.empty[String]))
-    assert(msd.metricDefinitions.isEmpty)
-  }
-
-  test("serializeDeserialize") {
-
-    val msd : MetricSourceDefinition = new MetricSourceDefinition("testDefinition", "A1", MetricSourceDefinitionType.API)
-    msd.hosts = List("h1")
-    msd.addMetricDefinition(MetricDefinition("M1", null, List("h2")))
-    msd.addMetricDefinition(MetricDefinition("M1", "A2", null))
-
-    val msdByteArray: Array[Byte] = SerializationUtils.serialize(msd)
-    assert(msdByteArray.nonEmpty)
-
-    val msd2: MetricSourceDefinition = SerializationUtils.deserialize(msdByteArray).asInstanceOf[MetricSourceDefinition]
-    assert(msd2 != null)
-    assert(msd == msd2)
-
-    val mapper : ObjectMapper = new ObjectMapper()
-    mapper.registerModule(new ADServiceScalaModule)
-
-    System.out.print(mapper.writeValueAsString(msd))
-
-  }
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/model/RangeTest.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/model/RangeTest.scala
deleted file mode 100644
index 16f4951..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/model/RangeTest.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.model
-
-import org.apache.ambari.metrics.adservice.model
-import org.scalatest.FlatSpec
-
-class RangeTest extends FlatSpec {
-
-  "A Range " should " return true for inner and boundary values" in {
-    val range : model.Range = model.Range(4,6)
-    assert(range.withinRange(5))
-    assert(range.withinRange(6))
-    assert(range.withinRange(4))
-    assert(!range.withinRange(7))
-  }
-
-  it should "accept same lower and higher range values" in {
-    val range : model.Range = model.Range(4,4)
-    assert(range.withinRange(4))
-  }
-
-}
diff --git a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/model/SeasonTest.scala b/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/model/SeasonTest.scala
deleted file mode 100644
index a661c05..0000000
--- a/ambari-metrics-anomaly-detection-service/src/test/scala/org/apache/ambari/metrics/adservice/model/SeasonTest.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.metrics.adservice.model
-
-import java.util.Calendar
-
-import org.apache.ambari.metrics.adservice.model
-import org.scalatest.FunSuite
-
-class SeasonTest extends FunSuite {
-
-  test("testBelongsTo") {
-
-    //Create Season for weekdays. Mon to Friday and 9AM - 5PM
-    var season : Season = Season(model.Range(Calendar.MONDAY,Calendar.FRIDAY), model.Range(9,17))
-
-    //Try with a timestamp on a Monday, @ 9AM.
-    val c = Calendar.getInstance
-    c.set(2017, Calendar.OCTOBER, 30, 9, 0, 0)
-    assert(season.belongsTo(c.getTimeInMillis))
-
-    c.set(2017, Calendar.OCTOBER, 30, 18, 0, 0)
-    assert(!season.belongsTo(c.getTimeInMillis))
-
-    //Try with a timestamp on a Sunday, @ 9AM.
-    c.set(2017, Calendar.OCTOBER, 29, 9, 0, 0)
-    assert(!season.belongsTo(c.getTimeInMillis))
-
-    //Create Season for Monday 11AM - 12Noon.
-    season = Season(model.Range(Calendar.MONDAY,Calendar.MONDAY), model.Range(11,12))
-    c.set(2017, Calendar.OCTOBER, 30, 9, 0, 0)
-    assert(!season.belongsTo(c.getTimeInMillis))
-
-    c.set(2017, Calendar.OCTOBER, 30, 11, 30, 0)
-    assert(season.belongsTo(c.getTimeInMillis))
-
-
-    //Create Season from Friday to Monday and 9AM - 5PM
-    season = Season(model.Range(Calendar.FRIDAY,Calendar.MONDAY), model.Range(9,17))
-
-    //Try with a timestamp on a Monday, @ 9AM.
-    c.set(2017, Calendar.OCTOBER, 30, 9, 0, 0)
-    assert(season.belongsTo(c.getTimeInMillis))
-
-    //Try with a timestamp on a Sunday, @ 3PM.
-    c.set(2017, Calendar.OCTOBER, 29, 15, 0, 0)
-    assert(season.belongsTo(c.getTimeInMillis))
-
-    //Try with a timestamp on a Wednesday, @ 9AM.
-    c.set(2017, Calendar.NOVEMBER, 1, 9, 0, 0)
-    assert(!season.belongsTo(c.getTimeInMillis))
-  }
-
-  test("testEquals") {
-
-    var season1: Season =  Season(model.Range(4,5), model.Range(2,3))
-    var season2: Season =  Season(model.Range(4,5), model.Range(2,3))
-    assert(season1 == season2)
-
-    var season3: Season =  Season(model.Range(4,4), model.Range(2,3))
-    assert(!(season1 == season3))
-  }
-
-  test("testSerialize") {
-    val season1 : Season = Season(model.Range(Calendar.MONDAY,Calendar.FRIDAY), model.Range(9,17))
-
-    val seasonString = Season.toJson(season1)
-
-    val season2 : Season = Season.fromJson(seasonString)
-    assert(season1 == season2)
-
-    val season3 : Season = Season(model.Range(Calendar.MONDAY,Calendar.THURSDAY), model.Range(9,17))
-    assert(!(season2 == season3))
-
-  }
-
-}
diff --git a/ambari-metrics-assembly/pom.xml b/ambari-metrics-assembly/pom.xml
index b1a6430..557b218 100644
--- a/ambari-metrics-assembly/pom.xml
+++ b/ambari-metrics-assembly/pom.xml
@@ -42,7 +42,6 @@
     <storm-sink-legacy.dir>${project.basedir}/../ambari-metrics-storm-sink-legacy</storm-sink-legacy.dir>
     <flume-sink.dir>${project.basedir}/../ambari-metrics-flume-sink</flume-sink.dir>
     <kafka-sink.dir>${project.basedir}/../ambari-metrics-kafka-sink</kafka-sink.dir>
-    <anomaly-detection.dir>${project.basedir}/../ambari-metrics-anomaly-detection-service</anomaly-detection.dir>
     <python.ver>python &gt;= 2.6</python.ver>
     <python.devel>python-devel</python.devel>
     <deb.publisher>Apache</deb.publisher>
@@ -57,7 +56,6 @@
     <storm.sink.legacy.jar>ambari-metrics-storm-sink-legacy-with-common-${project.version}.jar</storm.sink.legacy.jar>
     <flume.sink.jar>ambari-metrics-flume-sink-with-common-${project.version}.jar</flume.sink.jar>
     <kafka.sink.jar>ambari-metrics-kafka-sink-with-common-${project.version}.jar</kafka.sink.jar>
-    <anomaly.detection.jar>ambari-metrics-anomaly-detection-service-${project.version}.jar</anomaly.detection.jar>
   </properties>
 
   <build>
@@ -141,22 +139,6 @@
             </configuration>
           </execution>
           <execution>
-            <id>anomaly-detection</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>single</goal>
-            </goals>
-            <configuration>
-              <attach>false</attach>
-              <finalName>ambari-metrics-anomaly-detection-${project.version}</finalName>
-              <appendAssemblyId>false</appendAssemblyId>
-              <descriptors>
-                <descriptor>${assemblydescriptor.anomaly-detection}</descriptor>
-              </descriptors>
-              <tarLongFileMode>gnu</tarLongFileMode>
-            </configuration>
-          </execution>
-          <execution>
             <id>hadoop-sink</id>
             <phase>prepare-package</phase>
             <goals>
@@ -656,82 +638,6 @@
                 </configuration>
               </execution>
 
-              <!--ambari-metrics-anomaly-detection-->
-              <execution>
-                <id>ambari-metrics-anomaly-detection</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>rpm</goal>
-                </goals>
-                <configuration>
-                <name>ambari-metrics-anomaly-detection</name>
-                <copyright>2012, Apache Software Foundation</copyright>
-                <group>Development</group>
-                <description>Maven Recipe: RPM Package.</description>
-                <autoRequires>false</autoRequires>
-
-
-                <defaultFilemode>644</defaultFilemode>
-                <defaultDirmode>755</defaultDirmode>
-                <defaultUsername>root</defaultUsername>
-                <defaultGroupname>root</defaultGroupname>
-
-                <postinstallScriptlet>
-                  <scriptFile>${project.build.directory}/resources/rpm/anomaly-detection/postinstall.sh</scriptFile>
-                  <fileEncoding>utf-8</fileEncoding>
-                </postinstallScriptlet>
-
-                <mappings>
-                  <mapping>
-                    <!--jars-->
-                    <directory>/usr/lib/ambari-metrics-anomaly-detection/</directory>
-                    <sources>
-                      <source>
-                        <location>
-                          ${anomaly-detection.dir}/target/ambari-metrics-anomaly-detection-service-${project.version}.jar
-                        </location>
-                      </source>
-                    </sources>
-                  </mapping>
-                  <mapping>
-                    <directory>/usr/lib/ambari-metrics-anomaly-detection/spark</directory>
-                    <sources>
-                      <source>
-                        <location>
-                          ${anomaly-detection.dir}/target/embedded/spark
-                        </location>
-                      </source>
-                    </sources>
-                  </mapping>
-                  <mapping>
-                    <directory>/usr/sbin</directory>
-                    <filemode>755</filemode>
-                    <username>root</username>
-                    <groupname>root</groupname>
-                    <directoryIncluded>false</directoryIncluded>
-                    <sources>
-                      <source>
-                        <location>${anomaly-detection.dir}/conf/unix/ambari-metrics-admanager</location>
-                        <filter>false</filter>
-                      </source>
-                    </sources>
-                  </mapping>
-                  <mapping>
-                    <directory>/etc/ambari-metrics-anomaly-detection/conf</directory>
-                    <configuration>true</configuration>
-                    <sources>
-                      <source>
-                        <location>${anomaly-detection.dir}/conf/unix/config.yaml</location>
-                      </source>
-                      <source>
-                        <location>${anomaly-detection.dir}/conf/unix/log4j.properties</location>
-                      </source>
-                    </sources>
-                  </mapping>
-                </mappings>
-                </configuration>
-              </execution>
-
             </executions>
           </plugin>
         </plugins>
@@ -850,13 +756,10 @@
                     <path>/etc/ambari-metrics-collector/conf</path>
                     <path>/etc/ambari-metrics-grafana/conf</path>
                     <path>/etc/ams-hbase/conf</path>
-                    <path>/etc/ambari-metrics-anomaly-detection/conf</path>
                     <path>/var/run/ams-hbase</path>
                     <path>/var/run/ambari-metrics-grafana</path>
                     <path>/var/log/ambari-metrics-grafana</path>
                     <path>/var/lib/ambari-metrics-collector</path>
-                    <path>/usr/lib/ambari-metrics-anomaly-detection</path>
-                    <path>/var/lib/ambari-metrics-anomaly-detection</path>
                     <path>/var/lib/ambari-metrics-monitor/lib</path>
                     <path>/var/lib/ambari-metrics-grafana</path>
                     <path>/usr/lib/ambari-metrics-hadoop-sink</path>
@@ -1075,49 +978,6 @@
                   </mapper>
                 </data>
 
-                <!-- Anomaly Detection -->
-                <data>
-                  <src>${anomaly-detection.dir}/target/${anomaly.detection.jar}</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <dirmode>644</dirmode>
-                    <prefix>/usr/lib/ambari-metrics-anomaly-detection</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <type>link</type>
-                  <linkName>/usr/lib/ambari-metrics-anomaly-detection/ambari-metrics-anomaly-detection-service.jar</linkName>
-                  <linkTarget>/usr/lib/ambari-metrics-anomaly-detection/${anomaly.detection.jar}</linkTarget>
-                  <symlink>true</symlink>
-                </data>
-                <data>
-                  <src>${anomaly-detection.dir}/target/embedded/spark</src>
-                  <type>directory</type>
-                  <mapper>
-                    <type>perm</type>
-                    <prefix>/usr/lib/ambari-metrics-anomaly-detection/spark</prefix>
-                    <filemode>644</filemode>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${anomaly-detection.dir}/conf/unix/config.yaml</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/etc/ambari-metrics-anomaly-detection/conf</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${anomaly-detection.dir}/conf/unix/log4j.properties</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/etc/ambari-metrics-anomaly-detection/conf</prefix>
-                  </mapper>
-                </data>
                 <!-- hadoop sink -->
 
                 <data>
@@ -1214,7 +1074,6 @@
         <assemblydescriptor.monitor>src/main/assembly/monitor.xml</assemblydescriptor.monitor>
         <assemblydescriptor.sink>src/main/assembly/sink.xml</assemblydescriptor.sink>
         <assemblydescriptor.grafana>src/main/assembly/grafana.xml</assemblydescriptor.grafana>
-        <assemblydescriptor.anomaly-detection>src/main/assembly/anomaly-detection.xml</assemblydescriptor.anomaly-detection>
 
         <packagingFormat>jar</packagingFormat>
       </properties>
@@ -1495,13 +1354,6 @@
       <artifactId>ambari-metrics-host-aggregator</artifactId>
       <version>${project.version}</version>
     </dependency>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-anomaly-detection-service</artifactId>
-      <version>${project.version}</version>
-      <type>pom</type>
-      <optional>true</optional>
-    </dependency>
   </dependencies>
 
 
diff --git a/ambari-metrics-assembly/src/main/assembly/anomaly-detection.xml b/ambari-metrics-assembly/src/main/assembly/anomaly-detection.xml
deleted file mode 100644
index b05aaf3..0000000
--- a/ambari-metrics-assembly/src/main/assembly/anomaly-detection.xml
+++ /dev/null
@@ -1,60 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1"
-          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1 http://maven.apache.org/xsd/assembly-1.1.1.xsd">
-  <id>anomaly-detection</id>
-  <formats>
-    <format>dir</format>
-    <format>tar.gz</format>
-  </formats>
-
-  <fileSets>
-    <fileSet>
-      <directory>${anomaly-detection.dir}/target/embedded/spark</directory>
-      <outputDirectory>anomaly-detection/spark</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${anomaly-detection.dir}/conf/unix</directory>
-      <outputDirectory>anomaly-detection/bin</outputDirectory>
-      <includes>
-        <include>ambari-metrics-admanager</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${anomaly-detection.dir}/conf/unix</directory>
-      <outputDirectory>anomaly-detection/conf</outputDirectory>
-      <includes>
-        <include>config.yaml</include>
-        <include>log4j.properties</include>
-      </includes>
-    </fileSet>
-  </fileSets>
-
-  <files>
-    <file>
-      <fileMode>644</fileMode>
-      <source>${anomaly-detection.dir}/target/ambari-metrics-anomaly-detection-service-${project.version}.jar
-      </source>
-      <outputDirectory>anomaly-detection</outputDirectory>
-    </file>
-  </files>
-</assembly>
\ No newline at end of file
diff --git a/ambari-metrics-assembly/src/main/package/rpm/anomaly-detection/postinstall.sh b/ambari-metrics-assembly/src/main/package/rpm/anomaly-detection/postinstall.sh
deleted file mode 100644
index 399c439..0000000
--- a/ambari-metrics-assembly/src/main/package/rpm/anomaly-detection/postinstall.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-
-ANOMALY_DETECTION_LINK_NAME="/usr/lib/ambari-metrics-anomaly-detection/ambari-metrics-anomaly-detection-service.jar"
-ANOMALY_DETECTION_JAR="/usr/lib/ambari-metrics-anomaly-detection/${anomaly.detection.jar}"
-
-JARS=(${ANOMALY_DETECTION_JAR})
-LINKS=(${ANOMALY_DETECTION_LINK_NAME})
-
-for index in ${!LINKS[*]}
-do
-  rm -f ${LINKS[$index]} ; ln -s ${JARS[$index]} ${LINKS[$index]}
-done
diff --git a/ambari-metrics-grafana/src/main/scripted.js b/ambari-metrics-grafana/src/main/scripted.js
deleted file mode 100644
index 298535f..0000000
--- a/ambari-metrics-grafana/src/main/scripted.js
+++ /dev/null
@@ -1,118 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* global _ */
-
-/*
- * Complex scripted dashboard
- * This script generates a dashboard object that Grafana can load. It also takes a number of user
- * supplied URL parameters (in the ARGS variable)
- *
- * Return a dashboard object, or a function
- *
- * For async scripts, return a function, this function must take a single callback function as argument,
- * call this callback function with the dashboard object (look at scripted_async.js for an example)
- */
-
-'use strict';
-
-// accessible variables in this scope
-var window, document, ARGS, $, jQuery, moment, kbn;
-
-// Setup some variables
-var dashboard;
-
-// All url parameters are available via the ARGS object
-var ARGS;
-
-// Intialize a skeleton with nothing but a rows array and service object
-dashboard = {
-    rows : [],
-};
-
-// Set a title
-dashboard.title = 'Scripted dash';
-
-// Set default time
-// time can be overriden in the url using from/to parameters, but this is
-// handled automatically in grafana core during dashboard initialization
-
-
-var obj = JSON.parse(ARGS.anomalies);
-var metrics = obj.metrics;
-var rows = metrics.length
-
-dashboard.time = {
-    from: "now-1h",
-    to: "now"
-};
-
-var metricSet = new Set();
-
-for (var i = 0; i < rows; i++) {
-
-    var key = metrics[i].metricname;
-    if (metricSet.has(key)) {
-        continue;
-    }
-    metricSet.add(key)
-    var metricKeyElements = key.split(":");
-    var metricName = metricKeyElements[0];
-    var appId = metricKeyElements[1];
-    var hostname = metricKeyElements[2];
-
-    dashboard.rows.push({
-        title: 'Chart',
-        height: '300px',
-        panels: [
-            {
-                title: metricName,
-                type: 'graph',
-                span: 12,
-                fill: 1,
-                linewidth: 2,
-                targets: [
-                    {
-                        "aggregator": "none",
-                        "alias": metricName,
-                        "app": appId,
-                        "errors": {},
-                        "metric": metricName,
-                        "precision": "default",
-                        "refId": "A",
-                        "hosts": hostname
-                    }
-                ],
-                seriesOverrides: [
-                    {
-                        alias: '/random/',
-                        yaxis: 2,
-                        fill: 0,
-                        linewidth: 5
-                    }
-                ],
-                tooltip: {
-                    shared: true
-                }
-            }
-        ]
-    });
-}
-
-
-return dashboard;
diff --git a/ambari-metrics-timelineservice/pom.xml b/ambari-metrics-timelineservice/pom.xml
index e6a7e64..d06c0ea 100644
--- a/ambari-metrics-timelineservice/pom.xml
+++ b/ambari-metrics-timelineservice/pom.xml
@@ -348,7 +348,7 @@
     <dependency>
       <groupId>org.apache.ambari</groupId>
       <artifactId>ambari-metrics-common</artifactId>
-      <version>2.0.0.0-SNAPSHOT</version>
+      <version>${project.version}</version>
     </dependency>
 
     <dependency>
diff --git a/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricsService.java b/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricsService.java
index c2e9448..b09f876 100644
--- a/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricsService.java
+++ b/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricsService.java
@@ -160,10 +160,6 @@
               "start cache node", e);
         }
       }
-//      String kafkaServers = configuration.getKafkaServers();
-//      if (kafkaServers != null) {
-//        metricKafkaProducer = new MetricKafkaProducer(kafkaServers);
-//      }
 
       defaultTopNHostsLimit = Integer.parseInt(metricsConf.get(DEFAULT_TOPN_HOSTS_LIMIT, "20"));
       if (Boolean.parseBoolean(metricsConf.get(USE_GROUPBY_AGGREGATOR_QUERIES, "true"))) {
@@ -242,11 +238,6 @@
   }
 
   @Override
-  public TimelineMetrics getAnomalyMetrics(String method, long startTime, long endTime, Integer limit) throws SQLException {
-    return hBaseAccessor.getAnomalyMetricRecords(method, startTime, endTime, limit);
-  }
-
-  @Override
   public TimelineMetrics getTimelineMetrics(List<String> metricNames,
       List<String> hostnames, String applicationId, String instanceId,
       Long startTime, Long endTime, Precision precision, Integer limit,
@@ -415,14 +406,6 @@
       cache.putMetrics(metrics.getMetrics(), metricMetadataManager);
     }
 
-//    try {
-//      metricKafkaProducer.sendMetrics(metrics);
-////      if (metrics.getMetrics().size() != 0 && metrics.getMetrics().get(0).getAppId().equals("anomaly-engine-test-metric")) {
-////      }
-//    } catch (Exception e) {
-//      LOG.error(e);
-//    }
-
     return response;
   }
 
@@ -489,54 +472,6 @@
     return metricMetadataManager.getUuid(metricName, appId, instanceId, hostname);
   }
 
-  /**
-   * Given a metricName, appId, instanceId and optional hostname parameter, return a set of TimelineMetricKey objects
-   * that will have all the unique metric instances for the above parameter filter.
-   *
-   * @param metricName
-   * @param appId
-   * @param instanceId
-   * @param hosts
-   * @return
-   * @throws SQLException
-   * @throws IOException
-   */
-  @Override
-  public Set<Map<String, String>> getTimelineMetricKeys(String metricName, String appId, String instanceId, List<String> hosts)
-    throws SQLException, IOException {
-    Set<Map<String, String>> timelineMetricKeys = new HashSet<>();
-
-    if (CollectionUtils.isEmpty(hosts)) {
-      Set<String> hostsFromMetadata = new HashSet<>();
-      for (String host : metricMetadataManager.getHostedAppsCache().keySet()) {
-        if (metricMetadataManager.getHostedAppsCache().get(host).getHostedApps().contains(appId)) {
-          hostsFromMetadata.add(host);
-        }
-      }
-      for (String host : hostsFromMetadata) {
-        byte[] uuid = metricMetadataManager.getUuid(metricName, appId, instanceId, host);
-        Map<String, String> keyMap = new HashMap<>();
-        keyMap.put("metricName", metricName);
-        keyMap.put("appId", appId);
-        keyMap.put("hostname", host);
-        keyMap.put("uuid", new String(uuid));
-        timelineMetricKeys.add(keyMap);
-      }
-      return timelineMetricKeys;
-    } else {
-      for (String host : hosts) {
-        byte[] uuid = metricMetadataManager.getUuid(metricName, appId, instanceId, host);
-        Map<String, String> keyMap = new HashMap<>();
-        keyMap.put("metricName", metricName);
-        keyMap.put("appId", appId);
-        keyMap.put("hostname", host);
-        keyMap.put("uuid", new String(uuid));
-        timelineMetricKeys.add(keyMap);
-      }
-      return timelineMetricKeys;
-    }
-  }
-
   @Override
   public Map<String, Set<String>> getHostAppsMetadata() throws SQLException, IOException {
     Map<String, TimelineMetricHostMetadata> hostsMetadata = metricMetadataManager.getHostedAppsCache();
diff --git a/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java b/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index fc26f5d..fc59063 100644
--- a/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ b/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -48,9 +48,7 @@
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.TIMELINE_METRICS_PRECISION_TABLE_HBASE_BLOCKING_STORE_FILES;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.TIMELINE_METRIC_AGGREGATOR_SINK_CLASS;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.ALTER_METRICS_METADATA_TABLE;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.ANOMALY_METRICS_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CONTAINER_METRICS_TABLE_NAME;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_ANOMALY_METRICS_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_CONTAINER_METRICS_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_HOSTED_APPS_METADATA_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_INSTANCE_HOST_TABLE_SQL;
@@ -59,7 +57,6 @@
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_METADATA_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_TABLE_SQL;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_TREND_ANOMALY_METRICS_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.DEFAULT_ENCODING;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.DEFAULT_TABLE_COMPRESSION;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.GET_HOSTED_APPS_METADATA_SQL;
@@ -75,9 +72,7 @@
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_RECORD_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.PHOENIX_TABLES;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.PHOENIX_TABLES_REGEX_PATTERN;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.TREND_ANOMALY_METRICS_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_AGGREGATE_RECORD_SQL;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_ANOMALY_METRICS_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_CLUSTER_AGGREGATE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_CLUSTER_AGGREGATE_TIME_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_CONTAINER_METRICS_SQL;
@@ -85,7 +80,6 @@
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_INSTANCE_HOST_METADATA_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_METADATA_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_METRICS_SQL;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_TREND_ANOMALY_METRICS_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.source.InternalSourceProvider.SOURCE_NAME.RAW_METRICS;
 
 import java.io.IOException;
@@ -322,57 +316,6 @@
     commitMetrics(Collections.singletonList(timelineMetrics));
   }
 
-  private void commitAnomalyMetric(Connection conn, TimelineMetric metric) {
-    PreparedStatement metricRecordStmt = null;
-    try {
-      Map<String, String> metricMetadata = metric.getMetadata();
-      
-      byte[] uuid = metadataManagerInstance.getUuid(metric);
-      if (uuid == null) {
-        LOG.error("Error computing UUID for metric. Cannot write metrics : " + metric.toString());
-        return;
-      }
-
-      if (metric.getAppId().equals("anomaly-engine-ks") || metric.getAppId().equals("anomaly-engine-hsdev")) {
-        metricRecordStmt = conn.prepareStatement(String.format(UPSERT_TREND_ANOMALY_METRICS_SQL,
-          TREND_ANOMALY_METRICS_TABLE_NAME));
-
-        metricRecordStmt.setBytes(1, uuid);
-        metricRecordStmt.setLong(2, metric.getStartTime());
-        metricRecordStmt.setLong(3, Long.parseLong(metricMetadata.get("test-start-time")));
-        metricRecordStmt.setLong(4, Long.parseLong(metricMetadata.get("train-start-time")));
-        metricRecordStmt.setLong(5, Long.parseLong(metricMetadata.get("train-end-time")));
-        String json = TimelineUtils.dumpTimelineRecordtoJSON(metric.getMetricValues());
-        metricRecordStmt.setString(6, json);
-        metricRecordStmt.setString(7, metric.getMetadata().get("method"));
-        double anomalyScore = metric.getMetadata().containsKey("anomaly-score") ? Double.parseDouble(metric.getMetadata().get("anomaly-score"))  : 0.0;
-        metricRecordStmt.setDouble(8, anomalyScore);
-
-      } else {
-        metricRecordStmt = conn.prepareStatement(String.format(
-          UPSERT_ANOMALY_METRICS_SQL, ANOMALY_METRICS_TABLE_NAME));
-
-        metricRecordStmt.setBytes(1, uuid);
-        metricRecordStmt.setLong(2, metric.getStartTime());
-        String json = TimelineUtils.dumpTimelineRecordtoJSON(metric.getMetricValues());
-        metricRecordStmt.setString(3, json);
-        metricRecordStmt.setString(4, metric.getMetadata().get("method"));
-        double anomalyScore = metric.getMetadata().containsKey("anomaly-score") ? Double.parseDouble(metric.getMetadata().get("anomaly-score"))  : 0.0;
-        metricRecordStmt.setDouble(5, anomalyScore);
-      }
-
-      try {
-        metricRecordStmt.executeUpdate();
-      } catch (SQLException sql) {
-        LOG.error("Failed on insert records to store.", sql);
-      }
-
-    } catch (Exception e) {
-      LOG.error("Failed on insert records to anomaly table.", e);
-    }
-
-  }
-
   public void commitMetrics(Collection<TimelineMetrics> timelineMetricsCollection) {
     LOG.debug("Committing metrics to store");
     Connection conn = null;
@@ -384,9 +327,6 @@
               UPSERT_METRICS_SQL, METRICS_RECORD_TABLE_NAME));
       for (TimelineMetrics timelineMetrics : timelineMetricsCollection) {
         for (TimelineMetric metric : timelineMetrics.getMetrics()) {
-          if (metric.getAppId().startsWith("anomaly-engine") && !metric.getAppId().equals("anomaly-engine-test-metric")) {
-            commitAnomalyMetric(conn, metric);
-          }
 
           metricRecordStmt.clearParameters();
 
@@ -536,20 +476,6 @@
       stmt.executeUpdate( String.format(CREATE_CONTAINER_METRICS_TABLE_SQL,
         encoding, tableTTL.get(CONTAINER_METRICS_TABLE_NAME), compression));
 
-      //Anomaly Metrics
-      stmt.executeUpdate(String.format(CREATE_ANOMALY_METRICS_TABLE_SQL,
-        ANOMALY_METRICS_TABLE_NAME,
-        encoding,
-        tableTTL.get(METRICS_AGGREGATE_HOURLY_TABLE_NAME),
-        compression));
-
-      //Trend Anomaly Metrics
-      stmt.executeUpdate(String.format(CREATE_TREND_ANOMALY_METRICS_TABLE_SQL,
-        TREND_ANOMALY_METRICS_TABLE_NAME,
-        encoding,
-        tableTTL.get(METRICS_AGGREGATE_HOURLY_TABLE_NAME),
-        compression));
-
       // Host level
       String precisionSql = String.format(CREATE_METRICS_TABLE_SQL,
         encoding, tableTTL.get(METRICS_RECORD_TABLE_NAME), compression);
@@ -949,47 +875,6 @@
     insertMetricRecords(metrics, false);
   }
 
-  public TimelineMetrics getAnomalyMetricRecords(String method, long startTime, long endTime, Integer limit) throws SQLException {
-    Connection conn = getConnection();
-    PreparedStatement stmt = null;
-    ResultSet rs = null;
-    TimelineMetrics metrics = new TimelineMetrics();
-    try {
-      stmt = PhoenixTransactSQL.prepareAnomalyMetricsGetSqlStatement(conn, method, startTime, endTime, limit);
-      rs = stmt.executeQuery();
-      while (rs.next()) {
-
-        byte[] uuid = rs.getBytes("UUID");
-        TimelineMetric metric = metadataManagerInstance.getMetricFromUuid(uuid);
-
-        if (method.equals("ks") || method.equals("hsdev")) {
-          metric.setStartTime(rs.getLong("TEST_END_TIME"));
-        } else {
-          metric.setStartTime(rs.getLong("SERVER_TIME"));
-        }
-        metric.setInstanceId(null);
-
-        HashMap<String, String> metadata = new HashMap<>();
-        metadata.put("method", rs.getString("METHOD"));
-        metadata.put("anomaly-score", String.valueOf(rs.getDouble("ANOMALY_SCORE")));
-        if (method.equals("ks") || method.equals("hsdev")) {
-          metadata.put("test-start-time", String.valueOf(rs.getLong("TEST_START_TIME")));
-          metadata.put("train-start-time", String.valueOf(rs.getLong("TRAIN_START_TIME")));
-          metadata.put("train-end-time", String.valueOf(rs.getLong("TRAIN_END_TIME")));
-        }
-        metric.setMetadata(metadata);
-
-        TreeMap<Long, Double> sortedByTimeMetrics = readMetricFromJSON(rs.getString("METRICS"));
-        metric.setMetricValues(sortedByTimeMetrics);
-
-        metrics.getMetrics().add(metric);
-      }
-    } catch (Exception ex) {
-      LOG.error(ex);
-    }
-    return metrics;
-  }
-
 
   @SuppressWarnings("unchecked")
   public TimelineMetrics getMetricRecords(
diff --git a/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java b/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java
index 349ef83..b2cd1c2 100644
--- a/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java
+++ b/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java
@@ -107,9 +107,4 @@
      * @return [ hostname ]
      */
   List<String> getLiveInstances();
-
-  TimelineMetrics getAnomalyMetrics(String method, long startTime, long endTime, Integer limit) throws SQLException;
-
-  Set<Map<String, String>> getTimelineMetricKeys(String metricName, String appId, String instanceId,  List<String> hosts) throws SQLException, IOException;
-
 }
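The Phoenix templates backing these two interface methods are dropped in PhoenixTransactSQL.java below. For reference, the point-anomaly read ("ema"/"tukeys") resolved at runtime to roughly the following query, with the LIMIT clause appended by the removed prepareAnomalyMetricsGetSqlStatement when a limit was supplied (a sketch only; the limit value here is invented):

    // Hedged sketch of how the removed GET_ANOMALY_METRIC_SQL template resolved
    // against the METRIC_ANOMALIES table.
    String sql = String.format(
        "SELECT UUID, SERVER_TIME, METRICS, METHOD, ANOMALY_SCORE FROM %s " +
        "WHERE METHOD = ? AND SERVER_TIME > ? AND SERVER_TIME <= ? ORDER BY ANOMALY_SCORE DESC",
        "METRIC_ANOMALIES") + " LIMIT 100";
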
diff --git a/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java b/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
index a1755f0..9077ac6 100644
--- a/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
+++ b/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
@@ -39,27 +39,6 @@
 
   public static final Log LOG = LogFactory.getLog(PhoenixTransactSQL.class);
 
-  public static final String CREATE_ANOMALY_METRICS_TABLE_SQL =
-    "CREATE TABLE IF NOT EXISTS %s " +
-      "(UUID BINARY(20) NOT NULL, " +
-      "SERVER_TIME UNSIGNED_LONG NOT NULL, " +
-      "METRICS VARCHAR, " +
-      "METHOD VARCHAR, " +
-      "ANOMALY_SCORE DOUBLE CONSTRAINT pk " +
-      "PRIMARY KEY (UUID, SERVER_TIME)) DATA_BLOCK_ENCODING='%s', IMMUTABLE_ROWS=true, TTL=%s, COMPRESSION='%s'";
-
-  public static final String CREATE_TREND_ANOMALY_METRICS_TABLE_SQL =
-    "CREATE TABLE IF NOT EXISTS %s " +
-      "(UUID BINARY(20) NOT NULL, " +
-      "TEST_START_TIME UNSIGNED_LONG NOT NULL, " +
-      "TEST_END_TIME UNSIGNED_LONG NOT NULL, " +
-      "TRAIN_START_TIME UNSIGNED_LONG, " +
-      "TRAIN_END_TIME UNSIGNED_LONG, " +
-      "METRICS VARCHAR, " +
-      "METHOD VARCHAR, " +
-      "ANOMALY_SCORE DOUBLE CONSTRAINT pk " +
-      "PRIMARY KEY (UUID, TEST_START_TIME, TEST_END_TIME)) DATA_BLOCK_ENCODING='%s', IMMUTABLE_ROWS=true, TTL=%s, COMPRESSION='%s'";
-
   /**
    * Create table to store individual metric records.
    */
@@ -169,25 +148,6 @@
    */
   public static final String ALTER_SQL = "ALTER TABLE %s SET TTL=%s";
 
-  public static final String UPSERT_ANOMALY_METRICS_SQL = "UPSERT INTO %s " +
-    "(UUID, " +
-    "SERVER_TIME, " +
-    "METRICS, " +
-    "METHOD, " +
-    "ANOMALY_SCORE) VALUES " +
-    "(?, ?, ?, ?, ?)";
-
-  public static final String UPSERT_TREND_ANOMALY_METRICS_SQL = "UPSERT INTO %s " +
-    "(UUID, " +
-    "TEST_START_TIME, " +
-    "TEST_END_TIME, " +
-    "TRAIN_START_TIME, " +
-    "TRAIN_END_TIME, " +
-    "METRICS, " +
-    "METHOD, " +
-    "ANOMALY_SCORE) VALUES " +
-    "(?, ?, ?, ?, ?, ?, ?, ?)";
-
   /**
    * Insert into metric records table.
    */
@@ -263,22 +223,6 @@
   public static final String UPSERT_INSTANCE_HOST_METADATA_SQL =
     "UPSERT INTO INSTANCE_HOST_METADATA (INSTANCE_ID, HOSTNAME) VALUES (?, ?)";
 
-  public static final String GET_ANOMALY_METRIC_SQL = "SELECT UUID, SERVER_TIME, " +
-    "METRICS, " +
-    "METHOD, " +
-    "ANOMALY_SCORE " +
-    "FROM %s " +
-    "WHERE METHOD = ? AND SERVER_TIME > ? AND SERVER_TIME <= ? ORDER BY ANOMALY_SCORE DESC";
-
-  public static final String GET_TREND_ANOMALY_METRIC_SQL = "SELECT UUID, " +
-    "TEST_START_TIME, TEST_END_TIME, " +
-    "TRAIN_START_TIME, TRAIN_END_TIME, " +
-    "METRICS, " +
-    "METHOD, " +
-    "ANOMALY_SCORE " +
-    "FROM %s " +
-    "WHERE METHOD = ? AND TEST_END_TIME > ? AND TEST_END_TIME <= ? ORDER BY ANOMALY_SCORE DESC";
-
   /**
    * Retrieve a set of rows from metrics records table.
    */
@@ -403,9 +347,6 @@
     "MAX(METRIC_MAX), MIN(METRIC_MIN) FROM %s WHERE METRIC_NAME LIKE %s AND SERVER_TIME > %s AND " +
     "SERVER_TIME <= %s GROUP BY METRIC_NAME, APP_ID, INSTANCE_ID, UNITS";
 
-  public static final String ANOMALY_METRICS_TABLE_NAME = "METRIC_ANOMALIES";
-  public static final String TREND_ANOMALY_METRICS_TABLE_NAME = "TREND_METRIC_ANOMALIES";
-
   public static final String METRICS_RECORD_TABLE_NAME = "METRIC_RECORD";
 
   public static final String CONTAINER_METRICS_TABLE_NAME = "CONTAINER_METRICS";
@@ -470,40 +411,6 @@
     PhoenixTransactSQL.sortMergeJoinEnabled = sortMergeJoinEnabled;
   }
 
-  public static PreparedStatement prepareAnomalyMetricsGetSqlStatement(Connection connection, String method,
-                                                                       long startTime, long endTime, Integer limit) throws SQLException {
-    StringBuilder sb = new StringBuilder();
-    if (method.equals("ema") || method.equals("tukeys")) {
-      sb.append(String.format(GET_ANOMALY_METRIC_SQL, ANOMALY_METRICS_TABLE_NAME));
-    } else {
-      sb.append(String.format(GET_TREND_ANOMALY_METRIC_SQL, TREND_ANOMALY_METRICS_TABLE_NAME));
-    }
-    if (limit != null) {
-      sb.append(" LIMIT " + limit);
-    }
-    PreparedStatement stmt = null;
-    try {
-      stmt = connection.prepareStatement(sb.toString());
-      int pos = 1;
-
-      stmt.setString(pos++, method);
-      stmt.setLong(pos++, startTime);
-      stmt.setLong(pos, endTime);
-      if (limit != null) {
-        stmt.setFetchSize(limit);
-      }
-
-    } catch (SQLException e) {
-      if (stmt != null) {
-        stmt.close();
-      }
-      throw e;
-    }
-
-    return stmt;
-  }
-
-
   public static PreparedStatement prepareGetMetricsSqlStmt(Connection connection,
                                                            Condition condition) throws SQLException {
 
diff --git a/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java b/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
index 2930b33..c09900d 100644
--- a/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
+++ b/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
@@ -284,26 +284,6 @@
   }
 
   @GET
-  @Path("/metrics/anomalies")
-  @Produces({ MediaType.APPLICATION_JSON })
-  public TimelineMetrics getAnomalyMetrics(
-    @Context HttpServletRequest req,
-    @Context HttpServletResponse res,
-    @QueryParam("method") String method,
-    @QueryParam("startTime") String startTime,
-    @QueryParam("endTime") String endTime,
-    @QueryParam("limit") String limit
-    ) {
-    init(res);
-
-    try {
-      return timelineMetricStore.getAnomalyMetrics(method, parseLongStr(startTime), parseLongStr(endTime), parseIntStr(limit));
-    } catch (Exception e) {
-      throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
-    }
-  }
-
-  @GET
   @Path("/metrics/metadata")
   @Produces({ MediaType.APPLICATION_JSON })
   public Map<String, List<TimelineMetricMetadata>> getTimelineMetricMetadata(
@@ -379,30 +359,6 @@
     }
   }
 
-  @GET
-  @Path("/metrics/metadata/key")
-  @Produces({ MediaType.APPLICATION_JSON })
-  public Set<Map<String, String>> getTimelineMetricKey(
-    @Context HttpServletRequest req,
-    @Context HttpServletResponse res,
-    @QueryParam("metricName") String metricName,
-    @QueryParam("appId") String appId,
-    @QueryParam("instanceId") String instanceId,
-    @QueryParam("hostname") String hostname
-  ) {
-    init(res);
-
-    try {
-      if (StringUtils.isEmpty(hostname)) {
-        return timelineMetricStore.getTimelineMetricKeys(metricName, appId, instanceId, Collections.EMPTY_LIST);
-      } else {
-        return timelineMetricStore.getTimelineMetricKeys(metricName, appId, instanceId, Arrays.asList(StringUtils.split(hostname, ",")));
-      }
-    } catch (Exception e) {
-      throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
-    }
-  }
-
   /**
    * This is a discovery endpoint that advertises known live collector
    * instances. Note: It will always answer with current instance as live.
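For reference, the two endpoints removed from TimelineWebServices above were reachable with requests of roughly the following shape; the collector base URL and the parameter values are assumptions for the example, only the relative paths and query parameter names appear in the removed code:

    // Hedged sketch; "collectorBase" and the query values are hypothetical.
    String anomaliesUrl = collectorBase + "/metrics/anomalies"
        + "?method=ema&startTime=1514764800000&endTime=1514851200000&limit=10";
    String metricKeysUrl = collectorBase + "/metrics/metadata/key"
        + "?metricName=cpu_user&appId=HOST&hostname=host1,host2";
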
diff --git a/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java b/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
index de24c68..0549eb4 100644
--- a/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
+++ b/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
@@ -120,14 +120,4 @@
     return null;
   }
 
-  @Override
-  public TimelineMetrics getAnomalyMetrics(String method, long startTime, long endTime, Integer limit) {
-    return null;
-  }
-
-  @Override
-  public Set<Map<String, String>> getTimelineMetricKeys(String metricName, String appId, String instanceId, List<String> hosts) throws SQLException, IOException {
-    return Collections.emptySet();
-  }
-
 }
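One knock-on effect worth noting: with the two methods gone from the TimelineMetricStore interface, any out-of-tree implementation that still declares them as overrides will stop compiling, for example:

    // Hedged example of code that no longer compiles against the trimmed interface.
    @Override  // error: method does not override or implement a method from a supertype
    public TimelineMetrics getAnomalyMetrics(String method, long startTime, long endTime, Integer limit) {
      return null;
    }
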
diff --git a/pom.xml b/pom.xml
index c91f2f9..32dfad7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -33,7 +33,6 @@
     <module>ambari-metrics-host-monitoring</module>
     <module>ambari-metrics-grafana</module>
     <module>ambari-metrics-host-aggregator</module>
-    <module>ambari-metrics-anomaly-detection-service</module>
     <module>ambari-metrics-assembly</module>
   </modules>
   <properties>
@@ -49,8 +48,6 @@
     <grafana.tar>https://grafanarel.s3.amazonaws.com/builds/grafana-2.6.0.linux-x64.tar.gz</grafana.tar>
     <phoenix.tar>http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos7/3.x/BUILDS/3.0.0.0-623/tars/phoenix/phoenix-5.0.0.3.0.0.0-623.tar.gz</phoenix.tar>
     <phoenix.folder>phoenix-5.0.0.3.0.0.0-623</phoenix.folder>
-    <spark.tar>http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos7/3.x/BUILDS/3.0.0.0-439/tars/spark2/spark-2.1.0.3.0.0.0-439-bin-3.0.0.3.0.0.0-439.tgz</spark.tar>
-    <spark.folder>spark-2.1.0.3.0.0.0-439-bin-3.0.0.3.0.0.0-439</spark.folder>
     <resmonitor.install.dir>/usr/lib/python2.6/site-packages/resource_monitoring</resmonitor.install.dir>
     <powermock.version>1.6.2</powermock.version>
     <distMgmtSnapshotsId>apache.snapshots.https</distMgmtSnapshotsId>