Merge branch 'master' into GRIFFIN-AGORSHKOV
diff --git a/.gitignore b/.gitignore
index a09422a..5e0ef52 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,4 +41,5 @@
 
 .vscode
 
-environment.local.ts
\ No newline at end of file
+environment.local.ts
+logs/
\ No newline at end of file
diff --git a/KEYS b/KEYS
index 802b0c2..1173ae2 100644
--- a/KEYS
+++ b/KEYS
@@ -172,3 +172,52 @@
 3OmWOig=
 =me+d
 -----END PGP PUBLIC KEY BLOCK-----
+
+pub   rsa3072 2019-04-04 [SC]
+      DEB7B22DE1FDD7BE93368A231033FD5FC7A80CB5
+uid           [ultimate] Eugene (Apache Griffin Community) <liujin@apache.org>
+sig 3        1033FD5FC7A80CB5 2019-04-04  Eugene (Apache Griffin Community) <liujin@apache.org>
+sub   rsa3072 2019-04-04 [E]
+sig          1033FD5FC7A80CB5 2019-04-04  Eugene (Apache Griffin Community) <liujin@apache.org>
+
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQGNBFylXOcBDADA9jLy1iLhXUMOD5L89ZSzqDlL6jBm05kQPX2zKj6EmUoEsFad
+cra6iLo2v/iOrQ+khx2r2pExzPYcPRLy7Dk+myzz+N50owTdPY5MYGKcAVT8Dk8f
+DepXUQdE4TpsvKRS+770ni4KKS+bE0VRaQHzmCUTWgHFP74nqcd/utz7EJzyDoLB
+gph5Hqc40XkjhaDTg1zHownfYsYle2qi+O3llUB/Gnizeq6+XHKjVeOd7rg/KgG1
+yBMGCFrzBENNZRhVyndCGywPTnnZJ36hJtFwrc1rS9NTHzmWIjyeyy/zp3CsXo8N
+pUH1yUMxgCaTxXphNamMqh7hw/fNG5RPQUwuD2dJZv8Sv84dRBqvw1nKN6L4Gt0L
+6aEJjbDtpvC9egIsbv8HevWOMjQ3vlx5k4rGYWY49A+ft7/Iu4z2X3u+/hlH0KJ3
+ogP8jxruhKut049UjaXMNThwkkstwYRJCNzn6jIwUCaULcsvAfInjr/b7tgo21hb
+RBJvnlV8Dh5mr+sAEQEAAbQ1RXVnZW5lIChBcGFjaGUgR3JpZmZpbiBDb21tdW5p
+dHkpIDxsaXVqaW5AYXBhY2hlLm9yZz6JAc4EEwEKADgWIQTet7It4f3XvpM2iiMQ
+M/1fx6gMtQUCXKVc5wIbAwULCQgHAgYVCgkICwIEFgIDAQIeAQIXgAAKCRAQM/1f
+x6gMtT0+DAC85GX9JTypzWNAH5IY1CHDpU7PCNzJuv23HHhxMYyVCwkpMLET2VVR
+t+lRqm0yqBOuvJfTgdTRdOsg2F56JWFRJTf5wwhq9N90d8pQz8D1Y/1zYlUtho6V
+j49KD1zi7QldldSoHWHKGWoQpI2gUGgKjpzwHIrS8bHpZKm8QWeMRMgbTnzX2a+0
+HOxqeNrbup/DP8ycYKxErDKyMV+LolAEyJdEyRmAcMuUgyIbz6AZb85oV2jy1VNy
+baxeojcqITBHt3y/TNVxJayKND3Cdp7WyJUbvb8JzLXec80uWnNN6BApgOARO/Wt
+KtFxD2lvVdQCdLiu7wKbBxjV0UoyvRRCFVI0CEoc2FW/rZvdkrIVqGpfiOW5a0d1
+TyrEfncl3linnLS7NuDPE0liFxcZbFORN1e2zq4OdB9skczGdIYcc9OB1mNRpIBi
+LkdSAj+wdfyrU1h8C/WbLtYpkdbQ2n4u6EHolQ9UE28xy2W8Q0uonAxcQQ10DyId
+t4KWA2pRDZK5AY0EXKVc5wEMAOxhE7501mpLFdtgzJq1PNcTAUJpaVmBugc9zg4b
+Ejc6NVUfWIvppvup9c7SAdD3U8XT1e0kmsvw3adujqVXllq3w7KX6nN7V2NcarqF
+jMbRoOu+MANZwJIGXNJDhTVSabRp00QvuiiruNu6W/ad3m+9jAmN+uVig/1KXQGz
+Jr5IPHLY34sOwvq8tYYydfgVPExHhrinv0F0xxG6J6ysk22qUiwU1Wuy3D7Zk/h6
+j1hBLllqfAUt6pQYdHm8M330nvkST3+icRkSLUZgLtrhkl2D99jhGtc0GJMOFebQ
+JTeIfliHuQsPFrvqJ+5S3t/cAtWOeK2Tyg7cqvDzrrCZ2FJ7QsAQwIc8IAF8pe22
+OyBBwIxftPkdclAlZvFzBC1XScFCzF7rd2TiJ2+lpnySAxR4V0xp7+JlgRllaUAW
+jzQQgS0Q11dDvnLIv2hp3F/9vMsUvD2RSQBDwll3GbKHgZLiWJHu3Y6xLuHiF0A9
+wo8SQVc64sBuTJxcEf3Os2jUvQARAQABiQG2BBgBCgAgFiEE3reyLeH9176TNooj
+EDP9X8eoDLUFAlylXOcCGwwACgkQEDP9X8eoDLWTuQv+IEmAwx5mxUM6768Sj3Dl
+zdyAO9jEkNxEeFGSqG+I6aHjnrn/kIpmlDOMvUO1PlgqWjV7JSkKD4GG1HN3ndJB
+5ZZ9m064LCn6Xq/tUMQAMqdF+UB8PuXky7YsFpd/RAMD1OWSkhwE8VfU909RSmuG
+y+G2SJMvqvC9RQXh6gpzTi2sq4hiOc2My/YbGHwUYNRiHPO3hmsjVYJ2FI/GoGEH
+AaLiGcbYw9p/V9Sjgk+lPKOqWE7JjA1T5A4Bv2shhj6pDH6IXs7RTO4wbc/E++j1
+L51KqR8nK2uSWK3h75uCJnh7qSM6auRP2Vd/BuJHyj2pe/A5qBesz2kvRIySPDCM
+cwVS70sBLcgs32/mEYha3Mi0VL1xo48n4I/7AXn+XplyEmxz/kChxSDMm0SmU+3W
+wYPaWV5d2xuFAm5B9B4j9AGNHYquhf0wN6mU4JIDPNL/E1U4lYLlGzU6qvXLseNe
+SaTAW5dwCXx8y9Y7aLhwJvWNozCOWcRV0f34ATsfLzeb
+=3Fn/
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/griffin-doc/deploy/deploy-guide.md b/griffin-doc/deploy/deploy-guide.md
index 7327b15..33e2f40 100644
--- a/griffin-doc/deploy/deploy-guide.md
+++ b/griffin-doc/deploy/deploy-guide.md
@@ -18,15 +18,18 @@
 -->
 
 # Apache Griffin Deployment Guide
-For Apache Griffin users, please follow the instructions below to deploy Apache Griffin in your environment. Note that there are some dependencies that should be installed firstly.
+If you are new to Apache Griffin, please follow the instructions below to deploy Apache Griffin in your environment. Note that these steps install all components on a single physical machine, so you will need to adjust the configurations to match your actual topology.
 
 ### Prerequisites
 Firstly you need to install and configure following software products, here we use [ubuntu-18.10](https://www.ubuntu.com/download) as sample OS to prepare all dependencies.
 ```bash
 # put all download packages into /apache folder
-$ mkdir /home/user/software
-$ sudo ln -s /home/user/software /apache
+$ mkdir /home/<user>/software
+$ mkdir /home/<user>/software/data
+$ sudo ln -s /home/<user>/software /apache
 $ sudo ln -s /apache/data /data
+$ mkdir /apache/tmp
+$ mkdir /apache/tmp/hive
 ```
 
 - JDK (1.8 or later versions)
@@ -56,17 +59,19 @@
 $ npm -v
 ```
 
-- [Hadoop](http://apache.claz.org/hadoop/common/) (2.6.0 or later), you can get some help [here](https://hadoop.apache.org/docs/r2.7.2/hadoop-project-dist/hadoop-common/SingleCluster.html).
+- [Hadoop](http://apache.claz.org/hadoop/common/) (2.6.0 or later), you can get some help [here](https://hadoop.apache.org/docs/r2.7.2/hadoop-project-dist/hadoop-common/SingleCluster.html).
 
-- [Hive](http://apache.claz.org/hive/) (version 2.x), you can get some help [here](https://cwiki.apache.org/confluence/display/Hive/GettingStarted#GettingStarted-RunningHive).
+- [Hive](http://apache.claz.org/hive/) (version 2.x), you can get some help [here](https://cwiki.apache.org/confluence/display/Hive/GettingStarted#GettingStarted-RunningHive).
 
-- [Spark](http://spark.apache.org/downloads.html) (version 2.2.1), if you want to install Pseudo Distributed/Single Node Cluster, you can get some help [here](http://why-not-learn-something.blogspot.com/2015/06/spark-installation-pseudo.html).
+- [Spark](http://spark.apache.org/downloads.html) (version 2.2.1), if you want to install Pseudo Distributed/Single Node Cluster, you can get some help [here](http://why-not-learn-something.blogspot.com/2015/06/spark-installation-pseudo.html).
 
-- [Livy](http://archive.cloudera.com/beta/livy/livy-server-0.3.0.zip), you can get some help [here](http://livy.io/quickstart.html).
+- [Livy](http://archive.cloudera.com/beta/livy/livy-server-0.3.0.zip), you can get some help [here](http://livy.io/quickstart.html).
 
 - [ElasticSearch](https://www.elastic.co/downloads/elasticsearch) (5.0 or later versions).
 	ElasticSearch works as a metrics collector, Apache Griffin produces metrics into it, and our default UI gets metrics from it, you can use them by your own way as well.
 
+- [Scala](https://downloads.lightbend.com/scala/2.12.8/scala-2.12.8.tgz), you can get some help [here](https://www.scala-lang.org/).
+
 ### Configuration
 
 #### PostgreSQL
@@ -93,7 +98,7 @@
 
 #### Set Env
 
-export those variables below, or create hadoop_env.sh and put it into .bashrc
+Export the variables below, or create a griffin_env.sh with them and source it from .bashrc.
 ```bash
 #!/bin/bash
 export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
@@ -111,18 +116,20 @@
 export HIVE_HOME=/apache/hive
 export YARN_HOME=/apache/hadoop
 export SCALA_HOME=/apache/scala
+
+export PATH=$PATH:$HIVE_HOME/bin:$HADOOP_HOME/bin:$SPARK_HOME/bin:$LIVY_HOME/bin:$SCALA_HOME/bin
 ```
 
 #### Hadoop
-
 * **update configuration**
 
-here are sample configurations for hadoop<br>
 Put site-specific property overrides in this file **/apache/hadoop/etc/hadoop/core-site.xml**
 ```xml
 <configuration>
-    <name>fs.defaultFS</name>
-    <value>hdfs://127.0.0.1:9000</value>
+    <property>
+        <name>fs.defaultFS</name>
+        <value>hdfs://127.0.0.1:9000</value>
+    </property>    
 </configuration>
 ```
 
@@ -138,14 +145,6 @@
         <value>1</value>
     </property>
     <property>
-        <name>dfs.namenode.servicerpc-address</name>
-        <value>127.0.0.1:9001</value>
-    </property>
-    <property>
-        <name>dfs.namenode.rpc-address</name>
-        <value>127.0.0.1:9002</value>
-    </property>
-    <property>
         <name>dfs.namenode.name.dir</name>
         <value>file:///data/hadoop-data/nn</value>
     </property>
@@ -175,12 +174,20 @@
 * **start/stop hadoop nodes**
 ```bash
 # format name node
+# NOTE: if you have run namenode -format before, running it again generates a new cluster ID
+# in the name node's VERSION file. You must keep the same cluster ID in the data node's
+# VERSION file, otherwise the data node will fail to start up.
+# The VERSION files reside in /apache/data/hadoop-data/nn, snn and dn, as configured above.
 /apache/hadoop/bin/hdfs namenode -format
-# start namenode/datanode
+# start namenode/secondarynamenode/datanode
+# NOTE: after starting the dfs service, use 'ps -ef | grep java' to check whether the
+# namenode, secondary namenode and datanode processes are running.
+# If any of them is missing, look for clues in /apache/hadoop/logs/
 /apache/hadoop/sbin/start-dfs.sh
 # stop all nodes
-/apache/hadoop/sbin/stop-all.sh
+/apache/hadoop/sbin/stop-dfs.sh
 ```
+Here you can access http://127.0.0.1:50070/ to check the name node.
 * **start/stop hadoop ResourceManager**
 ```bash
 # manually clear the ResourceManager state store
@@ -190,6 +197,9 @@
 # stop the ResourceManager
 /apache/hadoop/sbin/yarn-daemon.sh stop resourcemanager
 ```
+Here you can access http://127.0.0.1:8088/cluster to check the hadoop cluster.
+ 
+Hadoop daemons also expose some information over HTTP, for example http://127.0.0.1:8088/stacks. Please refer to this [blog](https://blog.cloudera.com/blog/2009/08/hadoop-default-ports-quick-reference/) for a quick reference of the default ports.
 * **start/stop hadoop NodeManager**
 ```bash
 # startup the NodeManager
@@ -197,7 +207,8 @@
 # stop the NodeManager
 /apache/hadoop/sbin/yarn-daemon.sh stop nodemanager
 ```
-* **start/stop hadoop HistoryServer**
+Here you can access http://127.0.0.1:8088/cluster/nodes to check the hadoop nodes; you should see one node in the list.
+* **(optional) start/stop hadoop HistoryServer**
 ```bash
 # startup the HistoryServer
 /apache/hadoop/sbin/mr-jobhistory-daemon.sh start historyserver
@@ -206,18 +217,32 @@
 ```
 
 #### Hive
-You need to make sure that your spark cluster could access your HiveContext.
 * **update configuration**
 Copy hive/conf/hive-site.xml.template to hive/conf/hive-site.xml and update some fields.
 ```xml
 +++ hive/conf/hive-site.xml	2018-12-16 11:17:51.000000000 +0800
+@@ -72,12 +72,12 @@
+   </property>
+   <property>
+     <name>hive.exec.local.scratchdir</name>
+-    <value>${system:java.io.tmpdir}/${system:user.name}</value>
++    <value>/apache/tmp/hive</value>
+     <description>Local scratch space for Hive jobs</description>
+   </property>
+   <property>
+     <name>hive.downloaded.resources.dir</name>
+-    <value>${system:java.io.tmpdir}/${hive.session.id}_resources</value>
++    <value>/apache/tmp/hive/${hive.session.id}_resources</value>
+     <description>Temporary local directory for added resources in the remote file system.</description>
+   </property>
+   <property>
 @@ -368,7 +368,7 @@
    </property>
    <property>
      <name>hive.metastore.uris</name>
 -    <value/>
 +    <value>thrift://127.0.0.1:9083</value>
-     <description>Thrift URI for the remote metastore. Used by metastore client to connect to remote metastore.</description>
+     <description>Thrift URI for the remote metastore.</description>
    </property>
    <property>
 @@ -527,7 +527,7 @@
@@ -256,6 +281,23 @@
      <description>Username to use against metastore database</description>
    </property>
    <property>
+@@ -1682,7 +1682,7 @@
+   </property>
+   <property>
+     <name>hive.querylog.location</name>
+-    <value>${system:java.io.tmpdir}/${system:user.name}</value>
++    <value>/apache/tmp/hive</value>
+     <description>Location of Hive run time structured log file</description>
+   </property>
+   <property>
+@@ -3973,7 +3973,7 @@
+   </property>
+   <property>
+     <name>hive.server2.logging.operation.log.location</name>
+-    <value>${system:java.io.tmpdir}/${system:user.name}/operation_logs</value>
++    <value>/apache/tmp/hive/operation_logs</value>
+   </property>
+   <property>
 ```
 
 * **start up hive metastore service**
@@ -265,42 +307,508 @@
 ```
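+
+To confirm the metastore is up, you can check that something is listening on port 9083 (the port configured in hive.metastore.uris above); the exact command depends on the tools installed on your machine, for example:
+```bash
+# assumes netstat (net-tools package) is installed; `lsof -i :9083` works as well
+netstat -an | grep 9083
+```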
 
 #### Spark
-* **start up spark nodes**
+* **update configuration**
+
+Check $SPARK_HOME/conf/spark-defaults.conf
+```
+spark.master                    yarn-cluster
+spark.serializer                org.apache.spark.serializer.KryoSerializer
+spark.yarn.jars                 hdfs:///home/spark_lib/*
+spark.yarn.dist.files		hdfs:///home/spark_conf/hive-site.xml
+spark.sql.broadcastTimeout  500
+```
+Check $SPARK_HOME/conf/spark-env.sh
+```
+HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
+SPARK_MASTER_HOST=localhost
+SPARK_MASTER_PORT=7077
+SPARK_MASTER_WEBUI_PORT=8082
+SPARK_LOCAL_IP=localhost
+SPARK_PID_DIR=/apache/pids
+```
+Upload the following files to HDFS, otherwise you will hit `Error: Could not find or load main class org.apache.spark.deploy.yarn.ApplicationMaster` when you submit spark applications.
+```bash
+hdfs dfs -mkdir /home/spark_lib
+hdfs dfs -mkdir /home/spark_conf
+hdfs dfs -put $SPARK_HOME/jars/*  hdfs:///home/spark_lib/
+hdfs dfs -put $HIVE_HOME/conf/hive-site.xml hdfs:///home/spark_conf/
+```
+* **start/stop spark nodes**
 ```bash
 cp /apache/hive/conf/hive-site.xml /apache/spark/conf/
+# start master and slave nodes
 /apache/spark/sbin/start-master.sh
 /apache/spark/sbin/start-slave.sh  spark://localhost:7077
+
+# stop master and slave nodes
+/apache/spark/sbin/stop-slaves.sh 
+/apache/spark/sbin/stop-master.sh 
+
+# stop all
+/apache/spark/sbin/stop-all.sh
 ```
 
 #### Livy
 Apache Griffin need to schedule spark jobs by server, we use livy to submit our jobs.
-For some issues of Livy for HiveContext, we need to download 3 files or get them from Spark lib `$SPARK_HOME/lib/`, and put them into HDFS.
-```
-datanucleus-api-jdo-3.2.6.jar
-datanucleus-core-3.2.10.jar
-datanucleus-rdbms-3.2.9.jar
-```
+
 * **update configuration**
 ```bash
-mkdir livy/logs
-
-# update livy/conf/livy.conf
+mkdir /apache/livy/logs
+```
+Update $LIVY_HOME/conf/livy.conf
+```bash
+# update /apache/livy/conf/livy.conf
 livy.server.host = 127.0.0.1
 livy.spark.master = yarn
 livy.spark.deployMode = cluster
 livy.repl.enableHiveContext = true
+livy.server.port = 8998
 ```
 * **start up livy**
 ```bash
-/apache/livy/LivyServer
+/apache/livy/bin/livy-server start
 ```
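+
+Once Livy is up, you can do a quick sanity check through its REST API (the same /batches endpoint that Griffin's livy.uri points to later); on a fresh install it returns an empty batch list:
+```bash
+# expects a response like {"from":0,"total":0,"sessions":[]}
+curl http://127.0.0.1:8998/batches
+```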
 
 #### Elasticsearch
+* **update configuration**
 
-You might want to create Elasticsearch index in advance, in order to set number of shards, replicas, and other settings to desired values:
+Update $ES_HOME/config/elasticsearch.yml
 ```
-curl -XPUT http://es:9200/griffin -d '
+network.host: 127.0.0.1
+http.cors.enabled: true
+http.cors.allow-origin: "*"
+```
+* **start up elasticsearch**
+```bash
+/apache/elastic/bin/elasticsearch
+```
+You can access http://127.0.0.1:9200/ to check elasticsearch service.
+
+#### Griffin
+You can download the latest package from the [official link](http://griffin.apache.org/docs/latest.html), or build it locally from the [source code](https://github.com/apache/griffin.git).
+
+Before building Griffin, you have to update the following configuration files to match the settings from the previous steps.
+
+* **service/src/main/resources/application.properties**
+
+You can find a more detailed configuration description [here](#griffin-customization).
+```
+# Apache Griffin server port (default 8080)
+server.port = 8080
+spring.application.name=griffin_service
+
+# db configuration
+spring.datasource.url=jdbc:postgresql://localhost:5432/myDB?autoReconnect=true&useSSL=false
+spring.datasource.username=king
+spring.datasource.password=secret
+spring.jpa.generate-ddl=true
+spring.datasource.driver-class-name=org.postgresql.Driver
+spring.jpa.show-sql=true
+
+# Hive metastore
+hive.metastore.uris=thrift://localhost:9083
+hive.metastore.dbname=default
+hive.hmshandler.retry.attempts=15
+hive.hmshandler.retry.interval=2000ms
+# Hive cache time
+cache.evict.hive.fixedRate.in.milliseconds=900000
+
+# Kafka schema registry
+kafka.schema.registry.url=http://localhost:8081
+# Update job instance state at regular intervals
+jobInstance.fixedDelay.in.milliseconds=60000
+# Expiration time of job instances: 7 days, i.e. 604800000 milliseconds. Only milliseconds are supported as the time unit.
+jobInstance.expired.milliseconds=604800000
+# schedule predicate job every 5 minutes and repeat 12 times at most
+# interval time units: s (second), m (minute), h (hour), d (day); only these four units are supported
+predicate.job.interval=5m
+predicate.job.repeat.count=12
+# external properties directory location
+external.config.location=
+# external BATCH or STREAMING env
+external.env.location=
+# login strategy ("default" or "ldap")
+login.strategy=default
+# ldap
+ldap.url=ldap://hostname:port
+ldap.email=@example.com
+ldap.searchBase=DC=org,DC=example
+ldap.searchPattern=(sAMAccountName={0})
+# hdfs default name
+fs.defaultFS=
+
+# elasticsearch
+# elasticsearch.host = <IP>
+# elasticsearch.port = <elasticsearch rest port>
+# elasticsearch.user = user
+# elasticsearch.password = password
+elasticsearch.host=localhost
+elasticsearch.port=9200
+elasticsearch.scheme=http
+
+# livy
+livy.uri=http://localhost:8998/batches
+# yarn url
+yarn.uri=http://localhost:8088
+# griffin event listener
+internal.event.listeners=GriffinJobEventHook
+```  
+
+* **service/src/main/resources/quartz.properties**
+```
+org.quartz.scheduler.instanceName=spring-boot-quartz
+org.quartz.scheduler.instanceId=AUTO
+org.quartz.threadPool.threadCount=5
+org.quartz.jobStore.class=org.quartz.impl.jdbcjobstore.JobStoreTX
+# If you use postgresql, set this property value to org.quartz.impl.jdbcjobstore.PostgreSQLDelegate
+# If you use mysql, set this property value to org.quartz.impl.jdbcjobstore.StdJDBCDelegate
+# If you use h2, it's ok to set this property value to StdJDBCDelegate, PostgreSQLDelegate or others
+org.quartz.jobStore.driverDelegateClass=org.quartz.impl.jdbcjobstore.PostgreSQLDelegate
+org.quartz.jobStore.useProperties=true
+org.quartz.jobStore.misfireThreshold=60000
+org.quartz.jobStore.tablePrefix=QRTZ_
+org.quartz.jobStore.isClustered=true
+org.quartz.jobStore.clusterCheckinInterval=20000
+```
+
+* **service/src/main/resources/sparkProperties.json**
+
+**griffin measure path** is the location where you should put the jar file of the measure module.
+```
 {
+    "file": "hdfs:///<griffin measure path>/griffin-measure.jar",
+    "className": "org.apache.griffin.measure.Application",
+    "name": "griffin",
+    "queue": "default",
+    "numExecutors": 3,
+    "executorCores": 1,
+    "driverMemory": "1g",
+    "executorMemory": "1g",
+    "conf": {
+        "spark.yarn.dist.files": "hdfs:///<path to>/hive-site.xml"
+    },
+    "files": [
+    ],
+    "jars": [
+    ]
+}
+```
+
+* **service/src/main/resources/env/env_batch.json**
+
+Adjust sinks according to your requirement. At least, you will need to adjust HDFS output
+directory (hdfs:///griffin/persist by default), and Elasticsearch URL (http://es:9200/griffin/accuracy by default).
+Similar changes are required in `env_streaming.json`.
+```
+{
+  "spark": {
+    "log.level": "WARN"
+  },
+  "sinks": [
+    {
+      "type": "CONSOLE",
+      "config": {
+        "max.log.lines": 10
+      }
+    },
+    {
+      "type": "HDFS",
+      "config": {
+        "path": "hdfs:///griffin/persist",
+        "max.persist.lines": 10000,
+        "max.lines.per.file": 10000
+      }
+    },
+    {
+      "type": "ELASTICSEARCH",
+      "config": {
+        "method": "post",
+        "api": "http://127.0.0.1:9200/griffin/accuracy",
+        "connection.timeout": "1m",
+        "retry": 10
+      }
+    }
+  ],
+  "griffin.checkpoint": []
+}
+```
+
+Building Griffin is easy: just run the maven command `mvn clean install`. After a successful build, you can find two jars, `service-0.4.0.jar` and `measure-0.4.0.jar`, in the target folders of the service and measure modules.
+
+Rename the jars and upload the measure jar to the hadoop folder.
+```
+# change jar name
+mv measure-0.4.0.jar griffin-measure.jar
+mv service-0.4.0.jar griffin-service.jar
+# upload measure jar file
+hdfs dfs -put griffin-measure.jar /griffin/
+```
+
+Start griffin-service.jar to run the Griffin management service.
+```
+cd $GRIFFIN_HOME
+nohup java -jar griffin-service.jar>service.out 2>&1 &
+```
+
+After a few seconds, we can visit our default UI of Apache Griffin (by default the port of spring boot is 8080).
+```
+http://<your IP>:8080
+```
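+
+Before opening the UI, you can check that the service responds, for example by calling the version API described in the [api guide](../service/api-guide.md):
+```bash
+# prints the Apache Griffin version string
+curl -k -H "Accept: application/json" -X GET http://127.0.0.1:8080/api/v1/version
+```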
+
+You can conduct UI operations following the steps [here](../ui/user-guide.md).
+
+**Note**: The UI does not support all the backend features; to experience the advanced features you can use the service's [api](../service/api-guide.md) directly.
+
+##### Griffin Customization
+- Compression
+
+Griffin Service is a regular Spring Boot application, so it supports all customizations from Spring Boot.
+To enable output compression, the following should be added to `application.properties`:
+```
+server.compression.enabled=true
+server.compression.mime-types=application/json,application/xml,text/html,\
+                              text/xml,text/plain,application/javascript,text/css
+```
+
+- SSL
+
+It is possible to enable SSL encryption for api and web endpoints. To do that, you will need to prepare a keystore in a Spring-compatible format (for example, PKCS12), and add the following values to `application.properties`:
+```
+server.ssl.key-store=/path/to/keystore.p12
+server.ssl.key-store-password=yourpassword
+server.ssl.keyStoreType=PKCS12
+server.ssl.keyAlias=your_key_alias
+```
+
+- LDAP
+
+The following properties are available for LDAP; an example configuration is shown after the list:
+ - **ldap.url**: URL of LDAP server.
+ - **ldap.email**: Arbitrary suffix added to user's login before search, can be empty string. Used when user's DN contains some common suffix, and there is no bindDN specified. In this case, string after concatenation is used as DN for sending initial bind request.
+ - **ldap.searchBase**: Subtree DN to search.
+ - **ldap.searchPattern**: Filter expression, substring `{0}` is replaced with user's login after ldap.email is concatenated. This expression is used to find user object in LDAP. Access is denied if filter expression did not match any users.
+ - **ldap.sslSkipVerify**: Allows disabling certificate validation for secure LDAP servers.
+ - **ldap.bindDN**: Optional DN of service account used for user lookup. Useful if user's DN is different than attribute used as user's login, or if users' DNs are ambiguous.
+ - **ldap.bindPassword**: Optional password of bind service account.
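+
+A minimal example of these settings in `application.properties`; all values are placeholders for a hypothetical LDAP server and service account, adjust them to your environment:
+```
+login.strategy=ldap
+ldap.url=ldap://ldap.example.com:389
+ldap.email=@example.com
+ldap.searchBase=DC=org,DC=example
+ldap.searchPattern=(sAMAccountName={0})
+ldap.sslSkipVerify=false
+ldap.bindDN=CN=griffin-svc,OU=services,DC=org,DC=example
+ldap.bindPassword=bindSecret
+```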
+
+#### Launch Griffin Demo
+
+* **create hadoop folder**
+```bash
+$ hdfs dfs -ls /
+Found 3 items
+drwxr-xr-x   - king supergroup          0 2019-02-21 17:25 /data
+drwx-wx-wx   - king supergroup          0 2019-02-21 16:45 /tmp
+drwxr-xr-x   - king supergroup          0 2019-02-26 08:48 /user
+
+$ hdfs dfs -mkdir /griffin
+
+$ hdfs dfs -ls /
+Found 4 items
+drwxr-xr-x   - king supergroup          0 2019-02-21 17:25 /data
+drwxr-xr-x   - king supergroup          0 2019-02-26 10:30 /griffin
+drwx-wx-wx   - king supergroup          0 2019-02-21 16:45 /tmp
+drwxr-xr-x   - king supergroup          0 2019-02-26 08:48 /user
+
+$ hdfs dfs -put griffin-measure.jar /griffin/
+
+$ hdfs dfs -ls /griffin
+-rw-r--r--   1 king supergroup   30927307 2019-02-26 10:36 /griffin/griffin-measure.jar
+```
+Here you can refer to the [dfs commands](http://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-hdfs/HDFSCommands.html#dfs) 
+and command [examples](http://fibrevillage.com/storage/630-using-hdfs-command-line-to-manage-files-and-directories-on-hadoop). 
+
+* **integrate hadoop and hive service**
+```bash
+# create /home/spark_conf
+# -p option behavior is much like Unix mkdir -p, creating parent directories along the path.
+hdfs dfs -mkdir -p /home/spark_conf
+
+# upload hive-site.xml
+hdfs dfs -put hive-site.xml /home/spark_conf/
+```
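+
+You can verify the upload with an ls on the target folder:
+```bash
+# should list hive-site.xml
+hdfs dfs -ls /home/spark_conf/
+```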
+
+* **prepare demo tables**
+```bash
+# login hive client
+/apache/hive/bin/hive --database default
+
+# create demo tables
+hive> CREATE EXTERNAL TABLE `demo_src`(
+  `id` bigint,
+  `age` int,
+  `desc` string) 
+PARTITIONED BY (
+  `dt` string,
+  `hour` string)
+ROW FORMAT DELIMITED
+  FIELDS TERMINATED BY '|'
+LOCATION
+  'hdfs://127.0.0.1:9000/griffin/data/batch/demo_src';
+  
+hive> CREATE EXTERNAL TABLE `demo_tgt`(
+  `id` bigint,
+  `age` int,
+  `desc` string) 
+PARTITIONED BY (
+  `dt` string,
+  `hour` string)
+ROW FORMAT DELIMITED
+  FIELDS TERMINATED BY '|'
+LOCATION
+  'hdfs://127.0.0.1:9000/griffin/data/batch/demo_tgt';
+
+# check tables created  
+hive> show tables;
+OK
+demo_src
+demo_tgt
+Time taken: 0.04 seconds, Fetched: 2 row(s)
+```
+
+Check table definition.
+```bash
+hive> show create table demo_src;
+OK
+CREATE EXTERNAL TABLE `demo_src`(
+  `id` bigint, 
+  `age` int, 
+  `desc` string)
+PARTITIONED BY ( 
+  `dt` string, 
+  `hour` string)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+  'hdfs://127.0.0.1:9000/griffin/data/batch/demo_src'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1551168613')
+Time taken: 3.762 seconds, Fetched: 20 row(s)
+```
+
+If the table definition is not correct, drop it.
+```bash
+hive> drop table if exists demo_src;
+OK
+Time taken: 3.764 seconds
+hive> drop table if exists demo_tgt;
+OK
+Time taken: 0.632 seconds
+```
+
+* **spawn demo data**
+A script for spawning test data is provided; you can fetch it from [batch data](http://griffin.apache.org/data/batch/).
+Then execute ./gen_demo_data.sh to get the two data source files.
+```bash
+/apache/data/demo$ wget http://griffin.apache.org/data/batch/gen_demo_data.sh
+/apache/data/demo$ wget http://griffin.apache.org/data/batch/gen_delta_src.sh
+/apache/data/demo$ wget http://griffin.apache.org/data/batch/demo_basic
+/apache/data/demo$ wget http://griffin.apache.org/data/batch/delta_tgt
+/apache/data/demo$ wget http://griffin.apache.org/data/batch/insert-data.hql.template
+/apache/data/demo$ chmod 755 *.sh
+/apache/data/demo$ ./gen_demo_data.sh
+```
+
+Create gen-hive-data.sh
+```
+#!/bin/bash
+
+#create table
+hive -f create-table.hql
+echo "create table done"
+
+#current hour
+sudo ./gen_demo_data.sh
+cur_date=`date +%Y%m%d%H`
+dt=${cur_date:0:8}
+hour=${cur_date:8:2}
+partition_date="dt='$dt',hour='$hour'"
+sed s/PARTITION_DATE/$partition_date/ ./insert-data.hql.template > insert-data.hql
+hive -f insert-data.hql
+src_done_path=/griffin/data/batch/demo_src/dt=${dt}/hour=${hour}/_DONE
+tgt_done_path=/griffin/data/batch/demo_tgt/dt=${dt}/hour=${hour}/_DONE
+hadoop fs -mkdir -p /griffin/data/batch/demo_src/dt=${dt}/hour=${hour}
+hadoop fs -mkdir -p /griffin/data/batch/demo_tgt/dt=${dt}/hour=${hour}
+hadoop fs -touchz ${src_done_path}
+hadoop fs -touchz ${tgt_done_path}
+echo "insert data [$partition_date] done"
+
+#last hour
+sudo ./gen_demo_data.sh
+cur_date=`date -d '1 hour ago' +%Y%m%d%H`
+dt=${cur_date:0:8}
+hour=${cur_date:8:2}
+partition_date="dt='$dt',hour='$hour'"
+sed s/PARTITION_DATE/$partition_date/ ./insert-data.hql.template > insert-data.hql
+hive -f insert-data.hql
+src_done_path=/griffin/data/batch/demo_src/dt=${dt}/hour=${hour}/_DONE
+tgt_done_path=/griffin/data/batch/demo_tgt/dt=${dt}/hour=${hour}/_DONE
+hadoop fs -mkdir -p /griffin/data/batch/demo_src/dt=${dt}/hour=${hour}
+hadoop fs -mkdir -p /griffin/data/batch/demo_tgt/dt=${dt}/hour=${hour}
+hadoop fs -touchz ${src_done_path}
+hadoop fs -touchz ${tgt_done_path}
+echo "insert data [$partition_date] done"
+
+#next hours
+set +e
+while true
+do
+  sudo ./gen_demo_data.sh
+  cur_date=`date +%Y%m%d%H`
+  next_date=`date -d "+1hour" '+%Y%m%d%H'`
+  dt=${next_date:0:8}
+  hour=${next_date:8:2}
+  partition_date="dt='$dt',hour='$hour'"
+  sed s/PARTITION_DATE/$partition_date/ ./insert-data.hql.template > insert-data.hql
+  hive -f insert-data.hql
+  src_done_path=/griffin/data/batch/demo_src/dt=${dt}/hour=${hour}/_DONE
+  tgt_done_path=/griffin/data/batch/demo_tgt/dt=${dt}/hour=${hour}/_DONE
+  hadoop fs -mkdir -p /griffin/data/batch/demo_src/dt=${dt}/hour=${hour}
+  hadoop fs -mkdir -p /griffin/data/batch/demo_tgt/dt=${dt}/hour=${hour}
+  hadoop fs -touchz ${src_done_path}
+  hadoop fs -touchz ${tgt_done_path}
+  echo "insert data [$partition_date] done"
+  sleep 3600
+done
+set -e
+```
+
+Then we will load data into both tables every hour.
+```bash
+/apache/data/demo$ ./gen-hive-data.sh
+```
+
+After a while, you can query the demo data from the hive tables.
+```bash
+hive> select * from demo_src;
+124	935	935	20190226	17
+124	838	838	20190226	17
+124	631	631	20190226	17
+......
+Time taken: 2.19 seconds, Fetched: 375000 row(s)
+```
+
+Check the related data folders created on HDFS. 
+```bash
+$ hdfs dfs -ls /griffin/data/batch
+drwxr-xr-x   - king supergroup          0 2019-02-26 16:13 /griffin/data/batch/demo_src
+drwxr-xr-x   - king supergroup          0 2019-02-26 16:13 /griffin/data/batch/demo_tgt
+
+$ hdfs dfs -ls /griffin/data/batch/demo_src/
+drwxr-xr-x   - king supergroup          0 2019-02-26 16:14 /griffin/data/batch/demo_src/dt=20190226
+```
+
+You need to create the Elasticsearch index in advance, in order to set the number of shards, replicas, and other settings to the desired values:
+```
+curl -k -H "Content-Type: application/json" -X PUT http://127.0.0.1:9200/griffin \
+ -d '{
     "aliases": {},
     "mappings": {
         "accuracy": {
@@ -326,145 +834,8 @@
             "number_of_shards": "5"
         }
     }
-}
-'
+}'
 ```
-You should also modify some configurations of Apache Griffin for your environment.
+You can access http://127.0.0.1:9200/griffin to verify the index configuration.
 
-- <b>service/src/main/resources/application.properties</b>
-
-    ```
-    # Apache Griffin server port (default 8080)
-    server.port = 8080
-    # jpa
-    spring.datasource.url = jdbc:postgresql://<your IP>:5432/quartz?autoReconnect=true&useSSL=false
-    spring.datasource.username = <user name>
-    spring.datasource.password = <password>
-    spring.jpa.generate-ddl=true
-    spring.datasource.driverClassName = org.postgresql.Driver
-    spring.jpa.show-sql = true
-
-    # hive metastore
-    hive.metastore.uris = thrift://<your IP>:9083
-    hive.metastore.dbname = <hive database name>    # default is "default"
-
-    # external properties directory location, ignore it if not required
-    external.config.location =
-
-	# login strategy, default is "default"
-	login.strategy = <default or ldap>
-
-	# ldap properties, ignore them if ldap is not enabled
-	ldap.url = ldap://hostname:port
-	ldap.email = @example.com
-	ldap.searchBase = DC=org,DC=example
-	ldap.searchPattern = (sAMAccountName={0})
-
-	# hdfs, ignore it if you do not need predicate job
-	fs.defaultFS = hdfs://<hdfs-default-name>
-
-	# elasticsearch
-	elasticsearch.host = <your IP>
-	elasticsearch.port = <your elasticsearch rest port>
-	# authentication properties, uncomment if basic authentication is enabled
-	# elasticsearch.user = user
-	# elasticsearch.password = password
-	# livy
-	# Port Livy: 8998 Livy2:8999
-	livy.uri=http://localhost:8999/batches
-
-	# yarn url
-	yarn.uri=http://localhost:8088
-
-	
-    ```
-
-- <b>service/src/main/resources/sparkProperties.json</b>
-    ```
-	{
-	  "file": "hdfs:///<griffin measure path>/griffin-measure.jar",
-	  "className": "org.apache.griffin.measure.Application",
-	  "name": "griffin",
-	  "queue": "default",
-	  "numExecutors": 3,
-	  "executorCores": 1,
-	  "driverMemory": "1g",
-	  "executorMemory": "1g",
-	  "conf": {
-		"spark.yarn.dist.files": "hdfs:///<path to>/hive-site.xml"
-	 },
-	  "files": [
-	  ],
-	  "jars": [
-	  ]
-	}
-
-    ```
-    - \<griffin measure path> is the location where you should put the jar file of measure module.
-
-- <b>service/src/main/resources/env/env_batch.json</b>
-
-    Adjust sinks according to your requirement. At least, you will need to adjust HDFS output
-    directory (hdfs:///griffin/persist by default), and Elasticsearch URL (http://es:9200/griffin/accuracy by default).
-    Similar changes are required in `env_streaming.json`.
-
-#### Compression
-
-Griffin Service is regular Spring Boot application, so it supports all customizations from Spring Boot.
-To enable output compression, the following should be added to `application.properties`:
-```
-server.compression.enabled=true
-server.compression.mime-types=application/json,application/xml,text/html,text/xml,text/plain,application/javascript,text/css
-```
-
-#### SSL
-
-It is possible to enable SSL encryption for api and web endpoints. To do that, you will need to prepare keystore in Spring-compatible format (for example, PKCS12), and add the following values to `application.properties`:
-```
-server.ssl.key-store=/path/to/keystore.p12
-server.ssl.key-store-password=yourpassword
-server.ssl.keyStoreType=PKCS12
-server.ssl.keyAlias=your_key_alias
-```
-
-#### LDAP
-
-The following properties are available for LDAP:
- - **ldap.url**: URL of LDAP server.
- - **ldap.email**: Arbitrary suffix added to user's login before search, can be empty string. Used when user's DN contains some common suffix, and there is no bindDN specified. In this case, string after concatenation is used as DN for sending initial bind request.
- - **ldap.searchBase**: Subtree DN to search.
- - **ldap.searchPattern**: Filter expression, substring `{0}` is replaced with user's login after ldap.email is concatenated. This expression is used to find user object in LDAP. Access is denied if filter expression did not match any users.
- - **ldap.sslSkipVerify**: Allows to disable certificate validation for secure LDAP servers.
- - **ldap.bindDN**: Optional DN of service account used for user lookup. Useful if user's DN is different than attribute used as user's login, or if users' DNs are ambiguous.
- - **ldap.bindPassword**: Optional password of bind service account.
-
-### Build and Run
-
-Build the whole project and deploy. (NPM should be installed)
-
-  ```
-  mvn clean install
-  ```
-
-Put jar file of measure module into \<griffin measure path> in HDFS
-
-```
-cp measure/target/measure-<version>-incubating-SNAPSHOT.jar measure/target/griffin-measure.jar
-hdfs dfs -put measure/target/griffin-measure.jar <griffin measure path>/
-  ```
-
-After all environment services startup, we can start our server.
-
-  ```
-  java -jar service/target/service.jar
-  ```
-
-After a few seconds, we can visit our default UI of Apache Griffin (by default the port of spring boot is 8080).
-
-  ```
-  http://<your IP>:8080
-  ```
-
-You can use UI following the steps [here](../ui/user-guide.md).
-
-**Note**: The UI does not support all the backend features, to experience the advanced features you can use services directly.
+Everything is ready now; you can log in at http://127.0.0.1:8080 without a username or password, and then create measures and jobs to validate data quality following the [user guide](../ui/user-guide.md).
\ No newline at end of file
diff --git a/griffin-doc/dev/dev-env-build.md b/griffin-doc/dev/dev-env-build.md
index f61b55e..3164439 100644
--- a/griffin-doc/dev/dev-env-build.md
+++ b/griffin-doc/dev/dev-env-build.md
@@ -78,8 +78,10 @@
 
 For debug purpose, you'd better install hadoop, spark, hive locally, so you can test your program more quickly.
 
+Note: If you run Hadoop in pseudo-distributed mode on MacOS, you have to update hdfs-site.xml: comment out the parameters **dfs.namenode.servicerpc-address** and **dfs.namenode.rpc-address** (see the sketch below), otherwise you may get an error like "java.net.ConnectException: Call From mycomputer/127.0.1.1 to localhost:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see: http://wiki.apache.org/hadoop/ConnectionRefused"
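+
+A sketch of that change in hdfs-site.xml (the property values shown are the ones used in this documentation's sample setup; keep the rest of the file untouched):
+```xml
+<!-- commented out for pseudo-distributed mode on MacOS
+<property>
+    <name>dfs.namenode.servicerpc-address</name>
+    <value>127.0.0.1:9001</value>
+</property>
+<property>
+    <name>dfs.namenode.rpc-address</name>
+    <value>127.0.0.1:9002</value>
+</property>
+-->
+```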
+
 ## Deploy on docker container
-Firstly, in the griffin directory, build you packages at once.
+Firstly, in the griffin directory, build your packages at once.
 ```
 mvn clean install
 ```
diff --git a/griffin-doc/measure/dsl-guide.md b/griffin-doc/measure/dsl-guide.md
index 5296176..b9a4b8c 100644
--- a/griffin-doc/measure/dsl-guide.md
+++ b/griffin-doc/measure/dsl-guide.md
@@ -127,6 +127,14 @@
 Distinctness rule expression in Apache Griffin DSL is a list of selection expressions separated by comma, indicates the columns to check if is distinct.
     e.g. `name, age`, `name, (age + 1) as next_age`
 
+### Uniqueness Rule
+Uniqueness rule expression in Apache Griffin DSL is a list of selection expressions separated by comma, indicating the columns to check for uniqueness. Uniqueness measures the items that have no duplicates in the data.
+    e.g. `name, age`, `name, (age + 1) as next_age`
+
+### Completeness Rule
+Completeness rule expression in Apache Griffin DSL is a list of selection expressions separated by comma, indicating the columns to check for null values.
+    e.g. `name, age`, `name, (age + 1) as next_age`
+
 ### Timeliness Rule
 Timeliness rule expression in Apache Griffin DSL is a list of selection expressions separated by comma, indicates the input time and output time (calculate time as default if not set).  
 	e.g. `ts`, `ts, end_ts`
@@ -167,6 +175,12 @@
 
 After the translation, the metrics will be persisted in table `distinct_metric` and `dup_metric`.
 
+### Completeness
+For completeness, the goal is to check for null values: the columns you measure are incomplete if they are null. 
+- **total count of source**: `SELECT COUNT(*) AS total FROM source`, save as table `total_count`.
+- **incomplete metric**: `SELECT count(*) as incomplete FROM source WHERE NOT (id IS NOT NULL)`, save as table `incomplete_count`.
+- **complete metric**: `SELECT (source.total - incomplete_count.incomplete) AS complete FROM source LEFT JOIN incomplete_count`, save as table `complete_count`.
+
 ### Timeliness
 For timeliness, is to measure the latency of each item, and get the statistics of the latencies.  
 For example, the dsl rule is `ts, out_ts`, the first column means the input time of item, the second column means the output time of item, if not set, `__tmst` will be the default output time column. After the translation, the sql rule is as below:  
diff --git a/griffin-doc/measure/measure-configuration-guide.md b/griffin-doc/measure/measure-configuration-guide.md
index 2522ee4..ac7b5c2 100644
--- a/griffin-doc/measure/measure-configuration-guide.md
+++ b/griffin-doc/measure/measure-configuration-guide.md
@@ -83,7 +83,7 @@
 - **griffin.checkpoint**: This field configures list of griffin checkpoint parameters, multiple cache ways are supported. It is only for streaming dq case. Details of info cache configuration [here](#griffin-checkpoint).
 
 ### <a name="sinks"></a>Sinks
-- **type**: Metrics and records sink type, "console", "hdfs", "http", "mongo". 
+- **type**: Metrics and records sink type, "console", "hdfs", "http", "mongo", "custom". 
 - **config**: Configure parameters of each sink type.
 	+ console sink (aliases: "log")
 		* max.log.lines: the max lines of log.
@@ -98,6 +98,12 @@
         * url: url of mongo db.
         * database: database name.
         * collection: collection name. 
+    + custom sink
+        * class: class name of the user-provided data sink implementation (see the example below).
+        It should implement the org.apache.griffin.measure.sink.Sink trait and have a static method with the signature
+        ```def apply(ctx: SinkContext): Sink```.
+        The user-provided data sink must be present on the Spark job's class path, either by passing the custom jar
+        via the --jars parameter of spark-submit or by adding it to the "jars" list in sparkProperties.json.
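+
+        For example, if the custom sink is packaged into a jar that has already been uploaded to HDFS (the path below is hypothetical), it can be referenced from the "jars" list in sparkProperties.json:
+        ```
+        {
+            ...
+            "jars": [
+                "hdfs:///<path to>/custom-sink.jar"
+            ]
+        }
+        ```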
 
 ### <a name="griffin-checkpoint"></a>Griffin Checkpoint
 - **type**: Griffin checkpoint type, "zk" for zookeeper checkpoint.
@@ -238,10 +244,30 @@
     * num: the duplicate number name in metric, optional.
     * duplication.array: optional, if set as a non-empty string, the duplication metric will be computed, and the group metric name is this string.
     * with.accumulate: optional, default is true, if set as false, in streaming mode, the data set will not compare with old data to check distinctness.
+  + uniqueness dq type detail configuration
+    * source: name of data source to measure uniqueness.
+    * target: name of data source to compare with. It is always the same as source, or a superset of source.
+    * unique: the unique count name in metric, optional.
+    * total: the total count name in metric, optional.
+    * dup: the duplicate count name in metric, optional.
+    * num: the duplicate number name in metric, optional.
+    * duplication.array: optional, if set as a non-empty string, the duplication metric will be computed, and the group metric name is this string.
+  + completeness dq type detail configuration
+    * source: name of data source to measure completeness.
+    * total: the column name in metric, optional. The total count of rows.
+    * complete: the column name in metric, optional. The number of not null values.
+    * incomplete: the column name in metric, optional. The number of null values.
   + timeliness dq type detail configuration
     * source: name of data source to measure timeliness.
     * latency: the latency column name in metric, optional.
+    * total: column name, optional.
+    * avg: column name, optional. The average latency.
+    * step: column name, optional. The histogram bin, where step = floor(latency / step.size).
+    * count: column name, optional. The number of the same latencies in the concrete step.
+    * percentile: column name, optional.
     * threshold: optional, if set as a time string like "1h", the items with latency more than 1 hour will be record.
+    * step.size: optional, used to build the histogram of latencies, in milliseconds (ex. "100").
+    * percentile.values: optional, used to compute the percentile metrics, values between 0 and 1. For instance, setting [0.1, 0.9] shows the fastest and slowest latencies.
 - **cache**: Cache output dataframe. Optional, valid only for "spark-sql" and "df-ops" mode. Defaults to `false` if not specified.
 - **out**: List of output sinks for the job.
   + Metric output.
diff --git a/griffin-doc/measure/predicates.md b/griffin-doc/measure/predicates.md
new file mode 100644
index 0000000..389edbe
--- /dev/null
+++ b/griffin-doc/measure/predicates.md
@@ -0,0 +1,100 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+# About predicates
+
+## Overview
+The purpose of predicates is to make Griffin check certain conditions before starting a SparkSubmitJob.
+Depending on these conditions, Griffin decides whether or not to start the measurement.
+
+## Configure predicates
+
+To configure predicates, add the following property to the measure json:
+```
+{
+    ...
+     "data.sources": [
+        ...
+         "connectors": [
+                   "predicates": [
+                       {
+                         "type": "file.exist",
+                         "config": {
+                           "root.path": "/path/to/",
+                           "path": "file.ext,file2.txt"
+                         }
+                       }
+                   ],
+         ...
+         
+     ]
+}
+```
+
+Possible values for predicates.type:
+- "file.exist" - in this case creates predicate with class org.apache.griffin.core.job.FileExistPredicator. This predicate checks existence of files before starting Spark jobs.
+ ```
+                     {
+                         "type": "file.exist",
+                         "config": {
+                           "root.path": "/path/to/",
+                           "path": "file.ext,file2.txt"
+                         }
+                       }
+```
+
+- "custom" - in this case required transmit class name in the property "class" in config. 
+This example creates same predicate like in previous example
+```
+                     {
+                         "type": "custom",
+                         "config": {
+                           "class": "org.apache.griffin.core.job.FileExistPredicator",
+                           "root.path": "/path/to/",
+                           "path": "file.ext,file2.txt"
+                         }
+                       }
+```
+It is important to note that the predicate class must satisfy the following conditions (a minimal sketch is shown after the list):
+- implement interface **org.apache.griffin.core.job.Predicator**
+- have constructor with argument of type **org.apache.griffin.core.job.entity.SegmentPredicate**
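+
+A minimal sketch of such a class (the package, class name and check logic are illustrative only; verify the exact Predicator method signature against service-VERSION-lib.jar):
+```
+package com.example.griffin.predicate;
+
+import java.io.IOException;
+
+import org.apache.griffin.core.job.Predicator;
+import org.apache.griffin.core.job.entity.SegmentPredicate;
+
+public class AlwaysReadyPredicator implements Predicator {
+
+    private final SegmentPredicate segmentPredicate;
+
+    // required constructor with a SegmentPredicate argument
+    public AlwaysReadyPredicator(SegmentPredicate segmentPredicate) {
+        this.segmentPredicate = segmentPredicate;
+    }
+
+    // called by Griffin before submitting the Spark job;
+    // returning true allows the measurement to start
+    @Override
+    public boolean predicate() throws IOException {
+        return true;
+    }
+}
+```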
+
+## Deploying custom predicates
+To create a custom predicate you need to:
+1. Build the Griffin service with Maven.
+As a result, two artifacts will be built:
+- **service-VERSION.jar** - the executable Spring-Boot application
+- **service-VERSION-lib.jar** - a jar which can be used as a dependency
+This step is necessary because the executable Spring-Boot application cannot be used as a dependency in our plugin.
+2. Create a module and add the dependency that was built in the previous step:
+```
+         <dependency>
+             <groupId>org.apache.griffin</groupId>
+             <artifactId>service</artifactId>
+             <classifier>lib</classifier>
+             <version>${griffin.version}</version>
+             <scope>provided</scope>
+         </dependency>
+```
+3. Create a Predicate class, which should, as mentioned earlier, implement the Predicator interface and have a constructor with an argument of type SegmentPredicate.
+4. Build the module into a jar file and put it in any folder (for example /path-to-jar).
+5. Start the Griffin service application using the command:
+```
+java -cp target/service-VERSION.jar -Dloader.path=/path-to-jar/ org.springframework.boot.loader.PropertiesLauncher
+```
\ No newline at end of file
diff --git a/griffin-doc/service/api-guide.md b/griffin-doc/service/api-guide.md
index a7ab348..26435d6 100644
--- a/griffin-doc/service/api-guide.md
+++ b/griffin-doc/service/api-guide.md
@@ -26,14 +26,38 @@
 - [HTTP Response Design](#0)
 
 - [Griffin Basic](#1)
+    - [Get Version](#11)
 
-- [Measures](#2)
+- [Griffin Measures](#2)
+    - [Add Measure](#21)
+    - [Get Measure](#22)
+    - [Remove Measure](#23)
+    - [Update Measure](#24)
 
-- [Jobs](#3)
+- [Griffin Jobs](#3)
+    - [Add Job](#31)
+    - [Trigger job by id](#37)
+    - [Get Job](#32)
+    - [Remove Job](#33)
+    - [Get Job Instances](#34)
+    - [Get Job Instance by triggerKey](#38)
+    - [Get Job Healthy Statistics](#35)
+    - [Download Sample Records](#36)
+    - [Get Job Instance by Id](#38)
 
 - [Metrics](#4)
+    - [Get Metrics](#41)
+    - [Add Metric Value](#42)
+    - [Get Metric Value](#43)
+    - [Remove Metric Value](#44)
+    - [Get Metric Value by Job Instance Id](#45)
 
 - [Hive MetaStore](#5)
+    - [Get Table Metadata](#51)
+    - [Get Table Name](#52)
+    - [Get All Databases Metadata](#53)
+    - [Get Database Names](#54)
+    - [Get All Tables Metadata](#55)
 
 - [Auth](#6)
 
@@ -105,18 +129,24 @@
 
 ## Apache Griffin Basic
 
+<div id = "11"></div>
+
 ### Get Apache Griffin version
 `GET /api/v1/version`
 
-#### Response Body Sample
-`0.1.0`
+#### API Example
+```bash
+curl -k -H "Accept: application/json" -X GET http://127.0.0.1:8080/api/v1/version
+0.3.0
+```
 
 <h2 id = "2"></h2>
 
-## Measures
+## Griffin Measures
+
+<div id = "21"></div>
 
 ### Add measure
-
 `POST /api/v1/measures`
 
 #### Request Header
@@ -124,22 +154,18 @@
 | ------------ | ---------------- |
 | Content-Type | application/json |
 
-#### Request Body
+#### API Example
+There are two kinds of measures, Apache Griffin measure and external measure.
+<br>The measure's 'dq.type' can either be 'ACCURACY' or 'PROFILING'.
 
-| name    | description    | type    |
-| ------- | -------------- | ------- |
-| measure | measure entity | Measure |
-
-#### Request Body example 
-
-There are two kind of different measures, Apache Griffin measure and external measure. And for each type of measure, the 'dq.type' can be 'accuracy' or 'profiling'.
-
-Here is a request body example to create a Apache Griffin measure of  profiling:
+Here is an example that defines a profiling measure:
 ```
-{
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X POST http://127.0.0.1:8080/api/v1/measures \
+-d '{
     "name":"profiling_measure",
     "measure.type":"griffin",
-    "dq.type":"profiling",
+    "dq.type":"PROFILING",
     "rule.description":{
         "details":[
             {
@@ -148,7 +174,7 @@
             }
         ]
     },
-    "process.type":"batch",
+    "process.type":"BATCH",
     "owner":"test",
     "description":"measure description",
     "data.sources":[
@@ -157,7 +183,7 @@
             "connectors":[
                 {
                     "name":"connector_name",
-                    "type":"hive",
+                    "type":"HIVE",
                     "version":"1.2",
                     "data.unit":"1hour",
                     "data.time.zone":"UTC(WET,GMT)",
@@ -183,24 +209,24 @@
         "rules":[
             {
                 "dsl.type":"griffin-dsl",
-                "dq.type":"profiling",
+                "dq.type":"PROFILING",
                 "rule":"count(source.`age`) AS `age-count`,avg(source.`age`) AS `age-average`",
                 "name":"profiling",
-                "details":{
-
-                }
+                "details":{}
             }
         ]
     }
-}
+}'
 ```
-And for Apache Griffin measure of accuracy:
+Here is an example that defines an accuracy measure:
 ```
-{
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X POST http://127.0.0.1:8080/api/v1/measures \
+-d '{
     "name":"accuracy_measure",
     "measure.type":"griffin",
-    "dq.type":"accuracy",
-    "process.type":"batch",
+    "dq.type":"ACCURACY",
+    "process.type":"BATCH",
     "owner":"test",
     "description":"measure description",
     "data.sources":[
@@ -261,142 +287,66 @@
         "rules":[
             {
                 "dsl.type":"griffin-dsl",
-                "dq.type":"accuracy",
+                "dq.type":"ACCURACY",
                 "name":"accuracy",
                 "rule":"source.desc=target.desc"
             }
         ]
     }
-}
+}'
 ```
-Example of request body to create external measure:
+Here is an example that defines an external measure:
 ```
-{
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X POST http://127.0.0.1:8080/api/v1/measures \
+-d '{
     "name": "external_name",
     "measure.type": "external",
-    "dq.type": "accuracy",
+    "dq.type": "ACCURACY",
     "description": "measure description",
     "organization": "orgName",
     "owner": "test",
-    "metricName": "metricName"
-}
-```
-#### Response Body Sample
-
-The response body should be the created measure if success. For example:
-```
-{
-    "measure.type": "griffin",
-    "id": 1,
-    "name": "measureName",
-    "description": "measure description",
-    "organization": "orgName",
-    "owner": "test",
-    "deleted": false,
-    "dq.type": "accuracy",
-    "process.type": "batch",
-    "data.sources": [
-        {
-            "id": 1,
-            "name": "source",
-            "connectors": [
-                {
-                    "id": 1,
-                    "name": "connector_name_source",
-                    "type": "HIVE",
-                    "version": "1.2",
-                    "predicates": [
-                        {
-                            "id": 1,
-                            "type": "file.exist",
-                            "config": {
-                                "root.path": "hdfs:///griffin/demo_src",
-                                "path": "/dt=#YYYYMMdd#/hour=#HH#/_DONE"
-                            }
-                        }
-                    ],
-                    "data.unit": "1h",
-                    "config": {
-                        "database": "default",
-                        "table.name": "demo_src",
-                        "where": "dt=#YYYYMMdd# AND hour=#HH#"
-                    }
-                }
-            ]
-        },
-        {
-            "id": 2,
-            "name": "target",
-            "connectors": [
-                {
-                    "id": 2,
-                    "name": "connector_name_target",
-                    "type": "HIVE",
-                    "version": "1.2",
-                    "predicates": [
-                        {
-                            "id": 2,
-                            "type": "file.exist",
-                            "config": {
-                                "root.path": "hdfs:///griffin/demo_src",
-                                "path": "/dt=#YYYYMMdd#/hour=#HH#/_DONE"
-                            }
-                        }
-                    ],
-                    "data.unit": "1h",
-                    "config": {
-                        "database": "default",
-                        "table.name": "demo_src",
-                        "where": "dt=#YYYYMMdd# AND hour=#HH#"
-                    }
-                }
-            ]
-        }
-    ],
-    "evaluate.rule": {
-        "id": 1,
-        "rules": [
-            {
-                "id": 1,
-                "rule": "source.desc=target.desc",
-                "name": "rule_name",
-                "description": "Total count",
-                "dsl.type": "griffin-dsl",
-                "dq.type": "accuracy",
-                "details": {}
-            }
-        ]
-    }
-}
+    "metric.name": "metricName"
+}'
 ```
 
-### Get measures
-`GET /api/v1/measures`
-#### Response Body Sample
-```
-[
-    {
+<div id = "22"></div>
+
+### Get measure
+`GET /api/v1/measures`<br>
+`GET /api/v1/measures/{measure_id}`
+
+#### API Example
+```bash
+curl -k -H "Accept: application/json" -X GET http://127.0.0.1:8080/api/v1/measures
+[{
         "measure.type": "griffin",
-        "id": 4,
-        "name": "measure_no_predicate_day",
+        "id": 1,
+        "name": "accuracy_measure",
         "owner": "test",
-        "description": null,
-        "organization": null,
+        "description": "measure description",
         "deleted": false,
-        "dq.type": "accuracy",
-        "process.type": "batch",
-        "data.sources": [
-            {
-                "id": 6,
+        "dq.type": "ACCURACY",
+        "sinks": ["ELASTICSEARCH", "HDFS"],
+        "process.type": "BATCH",
+        "data.sources": [{
+                "id": 4,
                 "name": "source",
-                "connectors": [
-                    {
-                        "id": 6,
-                        "name": "source1517994133405",
+                "connectors": [{
+                        "id": 5,
+                        "name": "connector_name_source",
                         "type": "HIVE",
                         "version": "1.2",
-                        "predicates": [],
-                        "data.unit": "1day",
+                        "predicates": [{
+                                "id": 6,
+                                "type": "file.exist",
+                                "config": {
+                                    "root.path": "hdfs:///127.0.0.1/demo_src",
+                                    "path": "/dt=#YYYYMMdd#/hour=#HH#/_DONE"
+                                }
+                            }
+                        ],
+                        "data.unit": "1hour",
                         "data.time.zone": "UTC(WET,GMT)",
                         "config": {
                             "database": "default",
@@ -404,19 +354,26 @@
                             "where": "dt=#YYYYMMdd# AND hour=#HH#"
                         }
                     }
-                ]
-            },
-            {
+                ],
+                "baseline": false
+            }, {
                 "id": 7,
                 "name": "target",
-                "connectors": [
-                    {
-                        "id": 7,
-                        "name": "target1517994142573",
+                "connectors": [{
+                        "id": 8,
+                        "name": "connector_name_target",
                         "type": "HIVE",
                         "version": "1.2",
-                        "predicates": [],
-                        "data.unit": "1day",
+                        "predicates": [{
+                                "id": 9,
+                                "type": "file.exist",
+                                "config": {
+                                    "root.path": "hdfs:///127.0.0.1/demo_src",
+                                    "path": "/dt=#YYYYMMdd#/hour=#HH#/_DONE"
+                                }
+                            }
+                        ],
+                        "data.unit": "1hour",
                         "data.time.zone": "UTC(WET,GMT)",
                         "config": {
                             "database": "default",
@@ -424,65 +381,73 @@
                             "where": "dt=#YYYYMMdd# AND hour=#HH#"
                         }
                     }
-                ]
+                ],
+                "baseline": false
             }
         ],
         "evaluate.rule": {
-            "id": 4,
-            "rules": [
-                {
-                    "id": 4,
-                    "rule": "source.age=target.age AND source.desc=target.desc",
-                    "name": "accuracy",
+            "id": 2,
+            "rules": [{
+                    "id": 3,
+                    "rule": "source.desc=target.desc",
                     "dsl.type": "griffin-dsl",
-                    "dq.type": "accuracy"
+                    "dq.type": "ACCURACY"
                 }
             ]
-        }
+        },
+        "measure.type": "griffin"
     }
 ]
 ```
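+
+To fetch a single measure, append its id to the path; for example (the measure id `1` below is illustrative):
+```bash
+curl -k -H "Accept: application/json" -X GET http://127.0.0.1:8080/api/v1/measures/1
+```
+The response is the single measure object, with the same fields as the entries in the list above.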
 
+<div id = "23"></div>
+
+### Remove measure
+`DELETE /api/v1/measures/{measure_id}`
+When deleting a measure, the API will also delete its related jobs.
+#### API Example
+```bash
+curl -k -H "Accept: application/json" -X DELETE http://127.0.0.1:8080/api/v1/measures/1
+```
+The response body should be empty if no error happens, and the HTTP status is (204, "No Content").
+
+<div id = "24"></div>
 
 ### Update measure
 `PUT /api/v1/measures`
-#### Request Header
-| key          | value            |
-| ------------ | ---------------- |
-| Content-Type | application/json |
 
-#### Request Body
-| name    | description    | type    |
-| ------- | -------------- | ------- |
-| measure | measure entity | Measure |
-
-#### Request Body example 
-There are two kind of different measures, Apache Griffin measure and external measure. And for each type of measure, the 'dq.type' can be 'accuracy' or 'profiling'.
-
-Here is a request body example to update a Apache Griffin measure of accuracy:
+#### API Example
+Here is an example of updating a measure:
 ```
-{
-    "id": 1,
-    "name": "measureName_edit",
-    "description": "measure description",
-    "organization": "orgName",
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X PUT http://127.0.0.1:8080/api/v1/measures \
+-d '{
+    "measure.type": "griffin",
+    "id": 19,
+    "name": "profiling_measure_edited",
     "owner": "test",
+    "description": "measure description",
     "deleted": false,
-    "dq.type": "accuracy",
-    "process.type": "batch",
-    "data.sources": [
-        {
-            "id": 1,
+    "dq.type": "PROFILING",
+    "sinks": ["ELASTICSEARCH", "HDFS"],
+    "process.type": "BATCH",
+    "rule.description": {
+        "details": [{
+                "name": "age",
+                "infos": "Total Count,Average"
+            }
+        ]
+    },
+    "data.sources": [{
+            "id": 22,
             "name": "source",
-            "connectors": [
-                {
-                    "id": 1,
-                    "name": "connector_name_source",
+            "connectors": [{
+                    "id": 23,
+                    "name": "connector_name",
                     "type": "HIVE",
                     "version": "1.2",
-                    "predicates": [
-                        {
-                            "id": 1,
+                    "predicates": [{
+                            "id": 24,
                             "type": "file.exist",
                             "config": {
                                 "root.path": "hdfs:///griffin/demo_src",
@@ -490,204 +455,81 @@
                             }
                         }
                     ],
-                    "data.unit": "1h",
+                    "data.unit": "1hour",
+                    "data.time.zone": "UTC(WET,GMT)",
                     "config": {
                         "database": "default",
                         "table.name": "demo_src",
                         "where": "dt=#YYYYMMdd# AND hour=#HH#"
                     }
                 }
-            ]
-        },
-        {
-            "id": 2,
-            "name": "target",
-            "connectors": [
-                {
-                    "id": 2,
-                    "name": "connector_name_target",
-                    "type": "HIVE",
-                    "version": "1.2",
-                    "predicates": [
-                        {
-                            "id": 2,
-                            "type": "file.exist",
-                            "config": {
-                                "root.path": "hdfs:///griffin/demo_src",
-                                "path": "/dt=#YYYYMMdd#/hour=#HH#/_DONE"
-                            }
-                        }
-                    ],
-                    "data.unit": "1h",
-                    "config": {
-                        "database": "default",
-                        "table.name": "demo_src",
-                        "where": "dt=#YYYYMMdd# AND hour=#HH#"
-                    }
-                }
-            ]
+            ],
+            "baseline": false
         }
     ],
     "evaluate.rule": {
-        "id": 1,
-        "rules": [
-            {
-                "id": 1,
-                "rule": "source.desc=target.desc",
-                "name": "rule_name",
-                "description": "Total count",
+        "id": 20,
+        "rules": [{
+                "id": 21,
+                "rule": "count(source.`age`) AS `age-count`,avg(source.`age`) AS `age-average`",
                 "dsl.type": "griffin-dsl",
-                "dq.type": "accuracy",
+                "dq.type": "PROFILING",
                 "details": {}
             }
         ]
     },
     "measure.type": "griffin"
-}
+}'
 ```
-If you want to update an external measure, you can use following example json in request body.
+Here is an example of updating an external measure:
 ```
-{
-	"id":1,
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X PUT http://127.0.0.1:8080/api/v1/measures \
+-d '{
     "measure.type": "external",
-    "dq.type": "accuracy",
+    "id": 25,
     "name": "external_name",
-    "description": " update test measure",
+    "owner": "test",
+    "description": "measure description edited",
     "organization": "orgName",
-    "owner": "test",
-    "metricName": "metricName"
-}
-```
-#### Response Body Sample
-The response body should be empty if no error happens, and the HTTP status is (204, "No Content").
-
-### Delete measure
-`DELETE /api/v1/measures/{id}`
-When deleting a measure,api will also delete related jobs.
-#### Path Variable
-- id -`required` `Long` measure id
-
-#### Request Sample
-
-`/api/v1/measures/1`
-
-#### Response Body Sample
-
-The response body should be empty if no error happens, and the HTTP status is (204, "No Content").
-
-### Get measure by id
-`GET /api/v1/measures/{id}`
-#### Path Variable
-- id -`required` `Long` measure id
-
-#### Request Sample
-
-`/api/v1/measures/1`
-
-#### Response Body Sample
-```
-{
-    "measure.type": "griffin",
-    "id": 4,
-    "name": "measure_no_predicate_day",
-    "owner": "test",
-    "description": null,
-    "organization": null,
     "deleted": false,
-    "dq.type": "accuracy",
-    "process.type": "batch",
-    "data.sources": [
-        {
-            "id": 6,
-            "name": "source",
-            "connectors": [
-                {
-                    "id": 6,
-                    "name": "source1517994133405",
-                    "type": "HIVE",
-                    "version": "1.2",
-                    "predicates": [],
-                    "data.unit": "1day",
-                    "data.time.zone": "UTC(WET,GMT)",
-                    "config": {
-                        "database": "default",
-                        "table.name": "demo_src",
-                        "where": "dt=#YYYYMMdd# AND hour=#HH#"
-                    }
-                }
-            ]
-        },
-        {
-            "id": 7,
-            "name": "target",
-            "connectors": [
-                {
-                    "id": 7,
-                    "name": "target1517994142573",
-                    "type": "HIVE",
-                    "version": "1.2",
-                    "predicates": [],
-                    "data.unit": "1day",
-                    "data.time.zone": "UTC(WET,GMT)",
-                    "config": {
-                        "database": "default",
-                        "table.name": "demo_tgt",
-                        "where": "dt=#YYYYMMdd# AND hour=#HH#"
-                    }
-                }
-            ]
-        }
-    ],
-    "evaluate.rule": {
-        "id": 4,
-        "rules": [
-            {
-                "id": 4,
-                "rule": "source.age=target.age AND source.desc=target.desc",
-                "name": "accuracy",
-                "dsl.type": "griffin-dsl",
-                "dq.type": "accuracy"
-            }
-        ]
-    }
-}
+    "dq.type": "ACCURACY",
+    "sinks": ["ELASTICSEARCH", "HDFS"],
+    "metric.name": "metricName",
+    "measure.type": "external"
+}'
 ```
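+The response body should be empty if no error happens, and the HTTP status is (204, "No Content").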
 
 <h2 id = "3"></h2>
 
-## Jobs
+## Griffin Jobs
+
+<div id = "31"></div>
 
 ### Add job
 
 `POST /api/v1/jobs`
 
-#### Request Header
-| key          | value            |
-| ------------ | ---------------- |
-| Content-Type | application/json |
-
-#### Request Body
-| name        | description                              | type        |
-| ----------- | ---------------------------------------- | ----------- |
-| jobSchedule | custom class composed of job key parameters | JobSchedule |
-
-#### Request Body Sample
+#### API Example
 ```
-{
-    "measure.id": 5,
-	"job.name":"job_name",
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X POST http://127.0.0.1:8080/api/v1/jobs \
+-d '{
+    "measure.id": 10,
+    "job.name":"job_name_10",
+    "job.type":"batch",
     "cron.expression": "0 0/4 * * * ?",
     "cron.time.zone": "GMT+8:00",
     "predicate.config": {
-		"checkdonefile.schedule":{
-			"interval": "1m",
-			"repeat": 2
-		}
+        "checkdonefile.schedule":{
+            "interval": "1m",
+            "repeat": 2
+        }
     },
     "data.segments": [
         {
             "data.connector.name": "connector_name_source",
-			"as.baseline":true, 
+            "as.baseline":true,
             "segment.range": {
                 "begin": "-1h",
                 "length": "1h"
@@ -701,15 +543,98 @@
             }
         }
     ]
+}'
+```
+
+<div id = "37"></div>
+
+### Trigger job by id
+`POST /api/v1/jobs/trigger/{job_id}`
+
+In the current version, triggering a job this way schedules a single job instance. The method returns immediately,
+even though starting the instance may take some time. The response contains a `triggerKey` by which the instance
+can be found once it has started (see [find instance by trigger key](#38)).
+
+#### API Example
+```
+curl -k -X POST http://127.0.0.1:8080/api/v1/jobs/trigger/101
+{
+    "triggerKey": "DEFAULT.6da64b5bd2ee-34e2cb23-11a2-4f92-9cbd-6cb3402cdb48",
 }
 ```
-#### Response Body Sample
-The response body should be the created job schedule if success. For example:
+
+<div id = "32"></div>
+
+### Get all jobs
+`GET /api/v1/jobs`
+
+#### API Example
 ```
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X GET http://127.0.0.1:8080/api/v1/jobs
+[{
+        "job.type": "batch",
+        "id": 51,
+        "measure.id": 10,
+        "job.name": "job_name_10",
+        "metric.name": "job_name_10",
+        "quartz.name": "job_name_10_1547192473206",
+        "quartz.group": "BA",
+        "cron.expression": "0 0/4 * * * ?",
+        "job.state": {
+            "state": "NORMAL",
+            "toStart": false,
+            "toStop": true,
+            "nextFireTime": 1547693040000,
+            "previousFireTime": 1547692800000
+        },
+        "cron.time.zone": "GMT+8:00",
+        "predicate.config": {
+            "checkdonefile.schedule": {
+                "interval": "1m",
+                "repeat": 2
+            }
+        },
+        "data.segments": [{
+                "id": 52,
+                "data.connector.name": "connector_name_source",
+                "as.baseline": true,
+                "segment.range": {
+                    "id": 53,
+                    "begin": "-1h",
+                    "length": "1h"
+                }
+            }, {
+                "id": 54,
+                "data.connector.name": "connector_name_target",
+                "as.baseline": false,
+                "segment.range": {
+                    "id": 55,
+                    "begin": "-1h",
+                    "length": "1h"
+                }
+            }
+        ],
+        "job.type": "batch"
+    }
+]
+```
+
+### Get a job by id
+`GET /api/v1/jobs/config?jobId={job_id}`
+
+#### API Example
+```
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X GET http://127.0.0.1:8080/api/v1/jobs/config?jobId=827
 {
-    "id": 3,
-    "measure.id": 5,
-    "job.name": "job_name",
+    "job.type": "batch",
+    "id": 827,
+    "measure.id": 10,
+    "job.name": "job_name_10",
+    "metric.name": "job_name_10",
+    "quartz.name": "job_name_10_1547694147531",
+    "quartz.group": "BA",
     "cron.expression": "0 0/4 * * * ?",
     "cron.time.zone": "GMT+8:00",
     "predicate.config": {
@@ -718,124 +643,54 @@
             "repeat": 2
         }
     },
-    "data.segments": [
-        {
-            "id": 5,
+    "data.segments": [{
+            "id": 828,
             "data.connector.name": "connector_name_source",
             "as.baseline": true,
             "segment.range": {
-                "id": 5,
+                "id": 829,
                 "begin": "-1h",
                 "length": "1h"
             }
-        },
-        {
-            "id": 6,
+        }, {
+            "id": 830,
             "data.connector.name": "connector_name_target",
             "as.baseline": false,
             "segment.range": {
-                "id": 6,
+                "id": 831,
                 "begin": "-1h",
                 "length": "1h"
             }
         }
-    ]
+    ],
+    "job.type": "batch"
 }
 ```
 
-### Get jobs
-`GET /api/v1/jobs`
-
-#### Response Body Sample
-```
-[
-    {
-        "jobId": 1,
-        "jobName": "job_name",
-        "measureId": 1,
-        "triggerState": "NORMAL",
-        "nextFireTime": 1515400080000,
-        "previousFireTime": 1515399840000,
-        "cronExpression": "0 0/4 * * * ?"
-    }
-]
-
-```
+<div id = "33"></div>
 
 ### Delete job by id
-`DELETE /api/v1/jobs/{id}`
-#### Path Variable
-- id -`required` `Long` job id
-
-#### Response Body Sample
-
-The response body should be empty if no error happens, and the HTTP status is (204, "No Content").
-
-### Get job schedule by job name
-`GET /api/v1/jobs/config/{jobName}`
-
-#### Path Variable
-- jobName -`required` `String` job name
-
-#### Request Sample
-
-`/api/v1/jobs/config/job_no_predicate_day`
-
-#### Response Sample
+`DELETE /api/v1/jobs/{job_id}`
+#### API Example
 ```
-{
-    "id": 2,
-    "measure.id": 4,
-    "job.name": "job_no_predicate_day",
-    "cron.expression": "0 0/4 * * * ?",
-    "cron.time.zone": "GMT-8:00",
-    "predicate.config": {
-        "checkdonefile.schedule": {
-            "repeat": "12",
-            "interval": "5m"
-        }
-    },
-    "data.segments": [
-        {
-            "id": 3,
-            "data.connector.name": "source1517994133405",
-            "as.baseline": true,
-            "segment.range": {
-                "id": 3,
-                "begin": "-2",
-                "length": "2"
-            }
-        },
-        {
-            "id": 4,
-            "data.connector.name": "target1517994142573",
-            "as.baseline": false,
-            "segment.range": {
-                "id": 4,
-                "begin": "-5",
-                "length": "2"
-            }
-        }
-    ]
-}
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X DELETE http://127.0.0.1:8080/api/v1/jobs/51
 ```
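+The response body should be empty if no error happens, and the HTTP status is (204, "No Content").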
 
 ### Delete job by name
-`DELETE /api/v1/jobs`
-
-#### Request Parameter
-
-| name    | description | type   | example value |
-| ------- | ----------- | ------ | ------------- |
-| jobName | job name    | String | job_name      |
-
-#### Response Body Sample
+`DELETE /api/v1/jobs?jobName={name}`
+#### API Example
+```
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X DELETE http://127.0.0.1:8080/api/v1/jobs?jobName=job_name_10
+```
 
 The response body should be empty if no error happens, and the HTTP status is (204, "No Content").
 
+<div id = "34"></div>
 
 ### Get job instances
-`GET /api/v1/jobs/instances`
+`GET /api/v1/jobs/instances?jobId={id}&page={pageNum}&size={pageSize}`
 
 #### Request Parameter
 
@@ -845,68 +700,118 @@
 | page  | page you want starting from index 0 | int  | 0             |
 | size  | instance number per page            | int  | 10            |
 
-#### Response Body Sample
+#### API Example
 ```
-[
-    {
-        "id": 1,
+curl -k -G -X GET http://127.0.0.1:8080/api/v1/jobs/instances -d jobId=827 -d page=1 -d size=5
+[{
+        "id": 1176,
         "sessionId": null,
-        "state": "success",
-        "appId": null,
-        "appUri": null,
+        "state": "NOT_FOUND",
+        "type": "BATCH",
         "predicateGroup": "PG",
-        "predicateName": "job_name_predicate_1515399840077",
-        "deleted": true,
-        "timestamp": 1515399840092,
-        "expireTimestamp": 1516004640092
-    },
-    {
-        "id": 2,
+        "predicateName": "job_name_10_predicate_1547776800012",
+        "timestamp": 1547776800012,
+        "expireTimestamp": 1548381600012
+    }, {
+        "id": 1175,
         "sessionId": null,
-        "state": "not_found",
-        "appId": null,
-        "appUri": null,
+        "state": "NOT_FOUND",
+        "type": "BATCH",
         "predicateGroup": "PG",
-        "predicateName": "job_name_predicate_1515399840066",
-        "deleted": true,
-        "timestamp": 1515399840067,
-        "expireTimestamp": 1516004640067
+        "predicateName": "job_name_10_predicate_1547776560018",
+        "timestamp": 1547776560019,
+        "expireTimestamp": 1548381360019
     }
 ]
 ```
 
+<div id = "38"></div>
+
+### Find job instance by triggerKey
+`GET /api/v1/jobs/triggerKeys/{triggerKey}`
+
+This can be used after [triggering a job by job id](#37) to find the job instance once it has been scheduled.
+In the current version at most one instance is triggered, so the response is a list with a single element
+(or an empty list if no instance is found).
+
+#### API Example
+```
+curl http://127.0.0.1:8080/api/v1/jobs/triggerKeys/DEFAULT.6da64b5bd2ee-34e2cb23-11a2-4f92-9cbd-6cb3402cdb48
+[
+    {
+        "id":201,
+        "sessionId":1,
+        "state":"SUCCESS",
+        "type":"BATCH",
+        "appId":"application_1554199833471_0002",
+        "appUri":"http://localhost:38088/cluster/app/application_1554199833471_0002",
+        "predicateGroup":"PG",
+        "predicateName":"acc1a_name_predicate_1554202748883",
+        "triggerKey":"DEFAULT.6da64b5bd2ee-34e2cb23-11a2-4f92-9cbd-6cb3402cdb49",
+        "timestamp":1554202748884,
+        "expireTimestamp":1554807548884
+    }
+]
+```
+
+
+<div id = "35"></div>
+
 ### Get job healthy statistics
 `GET /api/v1/jobs/health`
 
-#### Response Body Sample
+#### API Example
 ```
+curl -k -X GET http://127.0.0.1:8080/api/v1/jobs/health
 {
-  "healthyJobCount": 1,
-  "jobCount": 2
+	"healthyJobCount": 0,
+	"jobCount": 1
 }
 ```
 
-### Download sample missing/mismatched records
-`GET /api/v1/jobs/download?hdfsPath={missingDataFilePath}`
+<div id = "36"></div>
 
-#### Response
-```
-If successful, this method returns missing records in the response body, 
-maximum record count is 100.
+### Download sample records
+`GET /api/v1/jobs/download?jobName={name}&ts={timestamp}`
 
+#### Request Parameter
+
+| name  | description                         | type | example value |
+| ----- | ----------------------------------- | ---- | ------------- |
+| jobName    | job name                             | String | job_name_10   |
+| timestamp  | timestamp of the records to download | Long   | 1547778857807 |
+
+#### API Example
 ```
+curl -k -G -X GET http://127.0.0.1:8080/api/v1/jobs/download \
+-d jobName=job_name_10 -d timestamp=1547778857807
+```
+If successful, this method returns the missing records in the response body; the maximum record count is 100.
+
+<div id = "38"></div>
+
+### Get Job Instance by Id
+`GET /api/v1/jobs/instances/{jobInstanceId}`
+
+#### API Example
+```
+curl -k -G -X GET http://127.0.0.1:8080/api/v1/jobs/instances/1
+```
+If successful, this method returns the job instance description for the given job instance id. If no instance with the given id is found, a Griffin exception is returned.
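+
+For illustration, a successful response has the same shape as the job instance objects shown above (the values below are examples):
+```
+{
+    "id": 1176,
+    "sessionId": null,
+    "state": "NOT_FOUND",
+    "type": "BATCH",
+    "predicateGroup": "PG",
+    "predicateName": "job_name_10_predicate_1547776800012",
+    "timestamp": 1547776800012,
+    "expireTimestamp": 1548381600012
+}
+```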
 
 <h2 id = "4"></h2>
 
 ## Metrics
 
+<div id = "41"></div>
+
 ### Get metrics
 
 `GET /api/v1/metrics`
 
-#### Response Example
+#### API Example
 The response is a map of metrics group by measure name. For example:
 ```
+curl -k -X GET http://127.0.0.1:8080/api/v1/metrics
 {
     "measure_no_predicate_day": [
         {
@@ -935,7 +840,7 @@
             ]
         }
     ],
-    "measre_predicate_hour": [
+    "measure_predicate_hour": [
         {
             "name": "job_predicate_hour",
             "type": "accuracy",
@@ -946,34 +851,32 @@
 }
 ```
 
+<div id = "42"></div>
+
 ### Add metric values
 `POST /api/v1/metrics/values`
-#### Request Header
-| key          | value            |
-| ------------ | ---------------- |
-| Content-Type | application/json |
 #### Request Body
 | name          | description             | type        |
 | ------------- | ----------------------- | ----------- |
 | Metric Values | A list of metric values | MetricValue |
-#### Request Body Sample
+#### API Example
 ```
-[
-	{
-		"name" : "metricName",
-		"tmst" : 1509599811123,
-		"value" : {
-			"__tmst" : 1509599811123,
-			"miss" : 11,
-			"total" : 125000,
-			"matched" : 124989
-		}
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X POST http://127.0.0.1:8080/api/v1/metrics/values \
+-d '[
+    {
+        "name" : "metricName",
+        "tmst" : 1509599811123,
+        "value" : {
+            "__tmst" : 1509599811123,
+            "miss" : 11,
+            "total" : 125000,
+            "matched" : 124989
+        }
    }
-]
+]'
 ```
-#### Response Body Sample
 The response body should have 'errors' field as 'false' if success, for example
-
 ```
 {
     "took": 32,
@@ -999,8 +902,10 @@
 }
 ```
 
+<div id = "43"></div>
+
 ### Get metric values by name 
-`GET /api/v1/metrics/values`
+`GET /api/v1/metrics/values?metricName={name}&size={size}&offset={offset}&tmst={timestamp}`
 
 #### Request Parameter
 name | description | type | example value
@@ -1011,8 +916,9 @@
 tmst | the start timestamp of records you want to get | long | 0
 
 Parameter offset and tmst are optional.
-#### Response Body Sample
+#### API Example
 ```
+curl -k -G -X GET http://127.0.0.1:8080/api/v1/metrics/values -d metricName=job_no_predicate_day -d size=10
 [
     {
         "name": "job_no_predicate_day",
@@ -1044,15 +950,15 @@
 ]
 ```
 
+<div id = "44"></div>
+
 ### Delete metric values by name
-`DELETE /api/v1/metrics/values`
-#### Request Parameters 
-| name       | description               | type   | example value |
-| ---------- | ------------------------- | ------ | ------------- |
-| metricName | name of the metric values | String | metricName    |
-#### Response Body Sample
+`DELETE /api/v1/metrics/values?metricName={name}`
+#### API Example
 The response body should have 'failures' field as empty if success, for example
 ```
+curl -k -H "Accept: application/json" \
+-X DELETE http://127.0.0.1:8080/api/v1/metrics/values?metricName=job_no_predicate_day
 {
     "took": 363,
     "timed_out": false,
@@ -1072,13 +978,41 @@
 }
 ```
 
+<div id = "45"></div>
+
+### Get Metric Value by Job Instance Id
+`GET /api/v1/metrics/values/{jobInstanceId}`
+#### API Example
+```
+curl -k -G -X GET http://127.0.0.1:8080/api/v1/metrics/values/304
+{
+    "name": "some_job",
+    "tmst": 1553526960000,
+    "value": {
+        "total": 74,
+        "miss": 31,
+        "matched": 43,
+        "matchedFraction": 0.581081081081081
+    },
+    "metadata": {
+        "applicationId": "\"application_1549876136110_0237\"",
+    }
+}
+```
+
 <h2 id = "5"></h2>
 
 ### Hive MetaStore
 
+<div id = "51"></div>
+
 ### Get table metadata
 
-`GET /api/v1/metadata/hive/table`
+`GET /api/v1/metadata/hive/table?db={}&table={}`
 
 #### Request Parameters
 | name  | description        | type   | example value |
@@ -1086,8 +1020,12 @@
 | db    | hive database name | String | default       |
 | table | hive table name    | String | demo_src      |
 
-#### Response Example Sample
+#### API Example
 ```
+curl -k -H "Accept: application/json" \
+-G -X GET http://127.0.0.1:8080/api/v1/metadata/hive/table \
+-d db=default \
+-d table=demo_src
 {
     "tableName": "demo_src",
     "dbName": "default",
@@ -1144,25 +1082,34 @@
     ]
 }
 ```
+
+<div id = "52"></div>
+
 ### Get table names
-`GET /api/v1/metadata/hive/tables/names`
+`GET /api/v1/metadata/hive/tables/names?db={}`
 #### Request Parameter
 | name | description        | typ    | example value |
 | ---- | ------------------ | ------ | ------------- |
 | db   | hive database name | String | default       |
 
-#### Response Example Sample
+#### API Example
 ```
+curl -k -H "Accept: application/json" \
+-X GET http://127.0.0.1:8080/api/v1/metadata/hive/tables/names?db=default
 [
   "demo_src",
   "demo_tgt"
 ]
 ```
 
+<div id = "53"></div>
+
 ### Get all database tables metadata
 `GET /api/v1/metadata/hive/dbs/tables`
-#### Response Example Sample
+#### API Example
 ```
+curl -k -H "Accept: application/json" \
+-X GET http://127.0.0.1:8080/api/v1/metadata/hive/dbs/tables
 {
    "default": [
     {
@@ -1278,23 +1225,31 @@
 
 ```
 
+<div id = "54"></div>
+
 ### Get database names
 `GET /api/v1/metadata/hive/dbs`
-#### Response Example Sample
+#### API Example
 ```
+curl -k -H "Accept: application/json" \
+-X GET http://127.0.0.1:8080/api/v1/metadata/hive/dbs
 [
-	"default"
+    "default"
 ]
 ```
 
+<div id = "55"></div>
+
 ### Get tables metadata
-`GET /api/v1/metadata/hive/tables`
+`GET /api/v1/metadata/hive/tables?db={name}`
 #### Request Parameter
 | name | description        | typ    | example value |
 | ---- | ------------------ | ------ | ------------- |
 | db   | hive database name | String | default       |
-#### Response Body Sample
+#### API Example
 ```
+curl -k -H "Accept: application/json" \
+-X GET http://127.0.0.1:8080/api/v1/metadata/hive/tables?db=default
 [
   {
     "tableName": "demo_src",
@@ -1422,7 +1377,13 @@
 | ---- | ------------------------------------- | ---- | --------------------------------------- |
 | map  | a map contains user name and password | Map  | `{"username":"user","password":"test"}` |
 
-#### Response Body Sample
+#### API Example
+```
+curl -k -H "Content-Type: application/json" -H "Accept: application/json" \
+-X POST http://127.0.0.1:8080/api/v1/login/authenticate \
+-d '{"username":"user","password":"test"}'
+```
+If authentication passes, the response below will be returned.
 ```
 {
   "fullName": "Default",
diff --git a/griffin-doc/service/postman/griffin.json b/griffin-doc/service/postman/griffin.json
index ac64412..66445da 100644
--- a/griffin-doc/service/postman/griffin.json
+++ b/griffin-doc/service/postman/griffin.json
@@ -1406,7 +1406,7 @@
 								"health"
 							]
 						},
-						"description": "`GET /api/v1/jobs/health`\n\n#### Response Body Sample\n```\n{\n  \"healthyJobCount\": 1,\n  \"jobCount\": 2\n}\n```"
+						"description": "`GET /api/v1/jobs/health`\n\n#### Response Body Sample\n```\n{\n  \"job instance info\"\n}\n```"
 					},
 					"response": [
 						{
@@ -1430,7 +1430,7 @@
 										"health"
 									]
 								},
-								"description": "`GET /api/v1/jobs/health`\n\n#### Response Body Sample\n```\n{\n  \"healthyJobCount\": 1,\n  \"jobCount\": 2\n}\n```"
+								"description": "`GET /api/v1/jobs/health`\n\n#### Response Body Sample\n```\n{\n  \"job instance info\"\n}\n```"
 							},
 							"status": "OK",
 							"code": 200,
@@ -1592,6 +1592,119 @@
 					]
 				},
 				{
+					"name": "Trigger job by id",
+					"request": {
+						"method": "POST",
+						"header": [],
+						"body": {
+							"mode": "raw",
+							"raw": "{\n\"timeout\": \"0\"\n}"
+						},
+						"url": {
+							"raw": "{{BASE_PATH}}/api/v1/jobs/trigger/:id",
+							"host": [
+								"{{BASE_PATH}}"
+							],
+							"path": [
+								"api",
+								"v1",
+								"jobs",
+								"trigger",
+								":id"
+							],
+							"variable": [
+								{
+									"key": "id",
+									"value": ""
+								}
+							]
+						},
+						"description": "`POST /api/v1/jobs/trigger/{id}`\n\n#### Path Variable\n- id -`required` `Long` job id\n\n#### Response\nThe response body should contain job instance fields if no error happens, and the HTTP status is (200, \"OK\").\n\nIt may return failed messages. For example\n```\n{\n    \"timestamp\": 1517208792108,\n    \"status\": 404,\n    \"error\": \"Not Found\",\n    \"code\": 40402,\n    \"message\": \"Job id does not exist\",\n    \"path\": \"/api/v1/jobs/trigger/2\"\n}\n```\nThere will be 'status' and 'error' fields in the response if an error happens, which correspond to the HTTP status.\n\nThere may also be 'code' and 'message' fields, which will point out the cause.\n\nIf an exception happens at the server, there will be an 'exception' field, which is the name of the exception."
+					},
+					"response": [
+						{
+							"name": "Trigger job by id example",
+							"originalRequest": {
+								"method": "POST",
+								"header": [],
+								"body": {
+									"mode": "raw",
+									"raw": "{\n\"timeout\": \"0\"\n}"
+								},
+								"url": {
+									"raw": "{{BASE_PATH}}/api/v1/jobs/trigger/:id",
+									"host": [
+										"{{BASE_PATH}}"
+									],
+									"path": [
+										"api",
+										"v1",
+										"jobs",
+										"trigger",
+										":id"
+									],
+									"variable": [
+										{
+											"key": "id",
+											"value": ""
+										}
+									]
+								},
+								"description": "`POST /api/v1/jobs/trigger/{id}`\n\n#### Path Variable\n- id -`required` `Long` job id\n\n#### Response\nThe response body should contain job instance fields if no error happens, and the HTTP status is (200, \"OK\").\n\nIt may return failed messages. For example\n```\n{\n    \"timestamp\": 1517208792108,\n    \"status\": 404,\n    \"error\": \"Not Found\",\n    \"code\": 40402,\n    \"message\": \"Job id does not exist\",\n    \"path\": \"/api/v1/jobs/trigger/2\"\n}\n```\nThere will be 'status' and 'error' fields in the response if an error happens, which correspond to the HTTP status.\n\nThere may also be 'code' and 'message' fields, which will point out the cause.\n\nIf an exception happens at the server, there will be an 'exception' field, which is the name of the exception."
+							},
+							"status": "OK",
+							"code": 200,
+							"_postman_previewlanguage": "json",
+							"header": [
+								{
+									"key": "access-control-allow-headers",
+									"value": "X-PINGOTHER, Origin, X-Requested-With, Content-Type, Accept",
+									"name": "access-control-allow-headers",
+									"description": "Used in response to a preflight request to indicate which HTTP headers can be used when making the actual request."
+								},
+								{
+									"key": "access-control-allow-methods",
+									"value": "POST, GET, OPTIONS, DELETE,PUT",
+									"name": "access-control-allow-methods",
+									"description": "Specifies the method or methods allowed when accessing the resource. This is used in response to a preflight request."
+								},
+								{
+									"key": "access-control-allow-origin",
+									"value": "*",
+									"name": "access-control-allow-origin",
+									"description": "Specifies a URI that may access the resource. For requests without credentials, the server may specify '*' as a wildcard, thereby allowing any origin to access the resource."
+								},
+								{
+									"key": "access-control-max-age",
+									"value": "3600",
+									"name": "access-control-max-age",
+									"description": "Indicates how long the results of a preflight request can be cached in seconds."
+								},
+								{
+									"key": "content-type",
+									"value": "application/json;charset=UTF-8",
+									"name": "content-type",
+									"description": "The mime type of this content"
+								},
+								{
+									"key": "date",
+									"value": "Wed, 25 Oct 2017 01:43:23 GMT",
+									"name": "date",
+									"description": "The date and time that the message was sent"
+								},
+								{
+									"key": "transfer-encoding",
+									"value": "chunked",
+									"name": "transfer-encoding",
+									"description": "The form of encoding used to safely transfer the entity to the user. Currently defined methods are: chunked, compress, deflate, gzip, identity."
+								}
+							],
+							"cookie": [],
+							"body": "{\n        \"id\": 1,\n        \"sessionId\": null,\n        \"state\": \"success\",\n        \"appId\": null,\n        \"appUri\": null,\n        \"predicateGroup\": \"PG\",\n        \"predicateName\": \"job_name_predicate_1515399840077\",\n        \"deleted\": true,\n        \"timestamp\": 1515399840092,\n        \"expireTimestamp\": 1516004640092\n}"
+						}
+					]
+				},
+				{
 					"name": "Delete  job by name",
 					"request": {
 						"method": "DELETE",
@@ -1806,6 +1919,75 @@
 							"body": ""
 						}
 					]
+				},
+				{
+					"name": "Get Job Instance by Id",
+					"request": {
+						"method": "GET",
+						"header": [],
+						"body": {
+							"mode": "raw",
+							"raw": ""
+						},
+						"url": {
+							"raw": "{{BASE_PATH}}/api/v1/jobs/instances/:id",
+							"host": [
+								"{{BASE_PATH}}"
+							],
+							"path": [
+								"api",
+								"v1",
+								"jobs",
+								"instances",
+								":id"
+							],
+							"variable": [
+								{
+									"key": "id",
+									"value": "2"
+								}
+							]
+						},
+						"description": "`GET /api/v1/jobs/instances/{id}`\n\n#### Response Body Sample\n```\n{\n  \"job instance info\"\n}\n```"
+					},
+					"response": [
+						{
+							"name": "Get Job Instance by Id",
+							"originalRequest": {
+								"method": "GET",
+								"header": [],
+								"body": {
+									"mode": "raw",
+									"raw": ""
+								},
+								"url": {
+									"raw": "{{BASE_PATH}}/api/v1/jobs/instances/:id",
+									"host": [
+										"{{BASE_PATH}}"
+									],
+									"path": [
+										"api",
+										"v1",
+										"jobs",
+										"instances",
+										":id"
+									],
+									"variable": [
+										{
+											"key": "id",
+											"value": "2"
+										}
+									]
+								},
+								"description": "`GET /api/v1/jobs/instances/{id}`\n\n#### Response Body Sample\n```\n{\n  \"job instance info\"\n}\n```"
+							},
+							"status": "OK",
+							"code": 200,
+							"_postman_previewlanguage": "json",
+							"cookie": [],
+							"body": "{\n    \"id\": 2,\n    \"sessionId\": null,\n    \"state\": \"NOT_FOUND\",\n    \"type\": \"BATCH\",\n    \"predicateGroup\": \"PG\",\n    \"predicateName\": \"job_name_10_predicate_1547776800012\",\n    \"timestamp\": 1547776800012,\n    \"expireTimestamp\": 1548381600012\n}"
+						}
+					]
 				}
 			]
 		},
@@ -2048,7 +2230,7 @@
 						],
 						"body": {
 							"mode": "raw",
-							"raw": "[\n\t{\n\t\t\"name\" : \"metricName\",\n\t\t\"tmst\" : 1509599811123,\n\t\t\"value\" : {\n\t\t\t\"__tmst\" : 1509599811123,\n\t\t\t\"miss\" : 11,\n\t\t\t\"total\" : 125000,\n\t\t\t\"matched\" : 124989\n\t\t}\n   }\n]"
+							"raw": "[\n\t{\n\t\t\"name\" : \"metricName\",\n\t\t\"tmst\" : 1509599811123,\n\t\t\"applicationId\" : \"app_1\",\n\t\t\"value\" : {\n\t\t\t\"__tmst\" : 1509599811123,\n\t\t\t\"miss\" : 11,\n\t\t\t\"total\" : 125000,\n\t\t\t\"matched\" : 124989\n\t\t}\n   }\n]"
 						},
 						"url": {
 							"raw": "{{BASE_PATH}}/api/v1/metrics/values",
@@ -2077,7 +2259,7 @@
 								],
 								"body": {
 									"mode": "raw",
-									"raw": "[\n\t{\n\t\t\"name\" : \"metricName\",\n\t\t\"tmst\" : 1509599811123,\n\t\t\"value\" : {\n\t\t\t\"__tmst\" : 1509599811123,\n\t\t\t\"miss\" : 11,\n\t\t\t\"total\" : 125000,\n\t\t\t\"matched\" : 124989\n\t\t}\n   }\n]"
+									"raw": "[\n\t{\n\t\t\"name\" : \"metricName\",\n\t\t\"tmst\" : 1509599811123,\n\t\t\"applicationId\" : \"app_1\",\n\t\t\"value\" : {\n\t\t\t\"__tmst\" : 1509599811123,\n\t\t\t\"miss\" : 11,\n\t\t\t\"total\" : 125000,\n\t\t\t\"matched\" : 124989\n\t\t}\n   }\n]"
 								},
 								"url": {
 									"raw": "{{BASE_PATH}}/api/v1/metrics/values",
@@ -2275,6 +2457,75 @@
 							"body": "{\"took\":363,\"timed_out\":false,\"total\":5,\"deleted\":5,\"batches\":1,\"version_conflicts\":0,\"noops\":0,\"retries\":{\"bulk\":0,\"search\":0},\"throttled_millis\":0,\"requests_per_second\":-1.0,\"throttled_until_millis\":0,\"failures\":[]}"
 						}
 					]
+				},
+				{
+					"name": "Get Metrics Value by Job Instance Id",
+					"request": {
+						"method": "GET",
+						"header": [],
+						"body": {
+							"mode": "raw",
+							"raw": ""
+						},
+						"url": {
+							"raw": "{{BASE_PATH}}/api/v1/metrics/values/:jobInstanceId",
+							"host": [
+								"{{BASE_PATH}}"
+							],
+							"path": [
+								"api",
+								"v1",
+								"metrics",
+								"values",
+								":jobInstanceId"
+							],
+							"variable": [
+								{
+									"key": "jobInstanceId",
+									"value": "304"
+								}
+							]
+						},
+						"description": "`GET /api/v1/metrics/values/{jobInstanceId}`\n\n#### Response Body Sample\n```\n{\n  \"metric value\"\n}\n```"
+					},
+					"response": [
+						{
+							"name": "Get Metrics Value by Job Instance Id",
+							"originalRequest": {
+								"method": "GET",
+								"header": [],
+								"body": {
+									"mode": "raw",
+									"raw": ""
+								},
+								"url": {
+									"raw": "{{BASE_PATH}}/api/v1/metrics/values/:jobInstanceId",
+									"host": [
+										"{{BASE_PATH}}"
+									],
+									"path": [
+										"api",
+										"v1",
+										"metrics",
+										"values",
+										":jobInstanceId"
+									],
+									"variable": [
+										{
+											"key": "jobInstanceId",
+											"value": "304"
+										}
+									]
+								},
+								"description": "`GET /api/v1/metrics/values/{jobInstanceId}`\n\n#### Response Body Sample\n```\n{\n  \"metric value\"\n}\n```"
+							},
+							"status": "OK",
+							"code": 200,
+							"_postman_previewlanguage": "json",
+							"cookie": [],
+							"body": "{\n    \"name\": \"some_job\",\n    \"tmst\": 1553526960000,\n    \"applicationId\": \"application_1549876136110_0237\",\n    \"value\": {\n        \"total\": 74\n    }\n}"
+						}
+					]
 				}
 			]
 		},
diff --git a/measure/pom.xml b/measure/pom.xml
index 51c5834..3facd08 100644
--- a/measure/pom.xml
+++ b/measure/pom.xml
@@ -23,7 +23,7 @@
     <parent>
         <groupId>org.apache.griffin</groupId>
         <artifactId>griffin</artifactId>
-        <version>0.4.0-SNAPSHOT</version>
+        <version>0.6.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>measure</artifactId>
@@ -33,20 +33,13 @@
     <url>http://maven.apache.org</url>
 
     <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-
-        <maven.compiler.source>1.8</maven.compiler.source>
-        <maven.compiler.target>1.8</maven.compiler.target>
-
         <scala.version>2.11.8</scala.version>
         <spark.version>2.2.1</spark.version>
         <scala.binary.version>2.11</scala.binary.version>
-
         <avro.version>1.7.7</avro.version>
         <jackson.version>2.8.7</jackson.version>
         <scalaj.version>2.3.0</scalaj.version>
         <mongo.version>2.1.0</mongo.version>
-        <junit.version>4.11</junit.version>
         <scalatest.version>3.0.0</scalatest.version>
         <slf4j.version>1.7.21</slf4j.version>
         <log4j.version>1.2.16</log4j.version>
@@ -84,6 +77,16 @@
             <artifactId>spark-hive_${scala.binary.version}</artifactId>
             <version>${spark.version}</version>
             <scope>provided</scope>
+            <exclusions>
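+                <!-- exclude transitive HTTP client jars; an explicit httpclient dependency is declared below -->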
+                <exclusion>
+                    <groupId>commons-httpclient</groupId>
+                    <artifactId>commons-httpclient</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.httpcomponents</groupId>
+                    <artifactId>httpclient</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.apache.spark</groupId>
@@ -130,11 +133,7 @@
             <artifactId>slf4j-api</artifactId>
             <version>${slf4j.version}</version>
         </dependency>
-        <!--<dependency>-->
-        <!--<groupId>org.slf4j</groupId>-->
-        <!--<artifactId>slf4j-simple</artifactId>-->
-        <!--<version>${slf4j.version}</version>-->
-        <!--</dependency>-->
+
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-log4j12</artifactId>
@@ -157,8 +156,6 @@
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
-            <version>${junit.version}</version>
-            <scope>test</scope>
         </dependency>
 
         <!--scala test-->
@@ -189,7 +186,6 @@
             <artifactId>mysql-connector-java</artifactId>
             <version>${mysql.java.version}</version>
         </dependency>
-
         <dependency>
             <groupId>com.datastax.spark</groupId>
             <artifactId>spark-cassandra-connector_2.11</artifactId>
@@ -200,6 +196,11 @@
             <artifactId>commons-httpclient</artifactId>
             <version>${commons.httpclient.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>4.5.9</version>
+        </dependency>
     </dependencies>
 
     <build>
@@ -237,12 +238,8 @@
                 </executions>
             </plugin>
             <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-compiler-plugin</artifactId>
-                <version>3.5.1</version>
-                <configuration>
-                    <source>1.8</source>
-                    <target>1.8</target>
-                </configuration>
             </plugin>
             <plugin>
                 <artifactId>maven-assembly-plugin</artifactId>
diff --git a/measure/src/main/resources/config-batch-path.json b/measure/src/main/resources/config-batch-path.json
new file mode 100644
index 0000000..6aab127
--- /dev/null
+++ b/measure/src/main/resources/config-batch-path.json
@@ -0,0 +1,44 @@
+{
+  "name": "accu_batch",
+
+  "process.type": "batch",
+
+  "data.sources": [
+    {
+      "name": "source",
+      "baseline": true,
+      "connectors": [
+        {
+          "type": "avro",
+          "version": "1.7",
+          "config": {
+            "file.path": "measure/src/test/resources/users_info_src"
+          }
+        }
+      ]
+    }, {
+      "name": "target",
+      "connectors": [
+        {
+          "type": "avro",
+          "version": "1.7",
+          "config": {
+            "file.path": "measure/src/test/resources/users_info_target"
+          }
+        }
+      ]
+    }
+  ],
+
+  "evaluate.rule": {
+    "rules": [
+      {
+        "dsl.type": "griffin-dsl",
+        "dq.type": "accuracy",
+        "out.dataframe.name": "accu",
+        "rule": "source.user_id = target.user_id AND upper(source.first_name) = upper(target.first_name) AND source.last_name = target.last_name AND source.address = target.address AND source.email = target.email AND source.phone = target.phone AND source.post_code = target.post_code"
+      }
+    ]
+  },
+  "sinks": ["CONSOLE","ELASTICSEARCH"]
+}
diff --git a/measure/src/main/resources/config-batch.json b/measure/src/main/resources/config-batch.json
index d2257a0..69ad485 100644
--- a/measure/src/main/resources/config-batch.json
+++ b/measure/src/main/resources/config-batch.json
@@ -12,7 +12,7 @@
           "type": "avro",
           "version": "1.7",
           "config": {
-            "file.name": "src/test/resources/users_info_src.avro"
+            "file.name": "measure/src/test/resources/users_info_src.avro"
           }
         }
       ]
@@ -23,7 +23,7 @@
           "type": "avro",
           "version": "1.7",
           "config": {
-            "file.name": "src/test/resources/users_info_target.avro"
+            "file.name": "measure/src/test/resources/users_info_target.avro"
           }
         }
       ]
diff --git a/measure/src/main/resources/env-batch.json b/measure/src/main/resources/env-batch.json
index bed6ed8..f2a1639 100644
--- a/measure/src/main/resources/env-batch.json
+++ b/measure/src/main/resources/env-batch.json
@@ -25,7 +25,7 @@
       "type": "ELASTICSEARCH",
       "config": {
         "method": "post",
-        "api": "http://10.148.181.248:39200/griffin/accuracy",
+        "api": "http://localhost:9200/griffin/accuracy",
         "connection.timeout": "1m",
         "retry": 10
       }
diff --git a/measure/src/main/scala/org/apache/griffin/measure/Application.scala b/measure/src/main/scala/org/apache/griffin/measure/Application.scala
index 99c6a25..0edeed6 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/Application.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/Application.scala
@@ -118,7 +118,7 @@
     }
   }
 
-  private def readParamFile[T <: Param](file: String)(implicit m : ClassTag[T]): Try[T] = {
+  def readParamFile[T <: Param](file: String)(implicit m : ClassTag[T]): Try[T] = {
     val paramReader = ParamReaderFactory.getParamReader(file)
     paramReader.readConfig[T]
   }
diff --git a/measure/src/main/scala/org/apache/griffin/measure/Loggable.scala b/measure/src/main/scala/org/apache/griffin/measure/Loggable.scala
index 2e113ab..dc4db7b 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/Loggable.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/Loggable.scala
@@ -18,33 +18,44 @@
 */
 package org.apache.griffin.measure
 
-import org.slf4j.LoggerFactory
+import org.apache.log4j.Level
+import org.apache.log4j.Logger
 
 trait Loggable {
 
-  @transient private lazy val logger = LoggerFactory.getLogger(getClass)
+  @transient private lazy val logger = Logger.getLogger(getClass)
 
-  protected def info(msg: String): Unit = {
+  @transient protected lazy val griffinLogger = Logger.getLogger("org.apache.griffin")
+
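+  // Resolve the effective log level of the "org.apache.griffin" logger by walking up the
+  // logger hierarchy until a logger with an explicitly configured level is found.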
+  def getGriffinLogLevel(): Level = {
+    var logger = griffinLogger
+    while (logger != null && logger.getLevel == null) {
+      logger = logger.getParent.asInstanceOf[Logger]
+    }
+    logger.getLevel
+  }
+
+  protected def info(msg: => String): Unit = {
     logger.info(msg)
   }
 
-  protected def debug(msg: String): Unit = {
+  protected def debug(msg: => String): Unit = {
     logger.debug(msg)
   }
 
-  protected def warn(msg: String): Unit = {
+  protected def warn(msg: => String): Unit = {
     logger.warn(msg)
   }
 
-  protected def warn(msg: String, e: Throwable): Unit = {
+  protected def warn(msg: => String, e: Throwable): Unit = {
     logger.warn(msg, e)
   }
 
-  protected def error(msg: String): Unit = {
+  protected def error(msg: => String): Unit = {
     logger.error(msg)
   }
 
-  protected def error(msg: String, e: Throwable): Unit = {
+  protected def error(msg: => String, e: Throwable): Unit = {
     logger.error(msg, e)
   }
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/configuration/enums/SinkType.scala b/measure/src/main/scala/org/apache/griffin/measure/configuration/enums/SinkType.scala
index d9e5d2b..2a6d335 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/configuration/enums/SinkType.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/configuration/enums/SinkType.scala
@@ -34,6 +34,7 @@
     HdfsSinkType,
     ElasticsearchSinkType,
     MongoSinkType,
+    CustomSinkType,
     UnknownSinkType
   )
 
@@ -84,6 +85,14 @@
   val desc = "distinct"
 }
 
+/**
+  * custom sink (requires an extra jar providing the sink implementation)
+  */
+case object CustomSinkType extends SinkType {
+  val idPattern = "^(?i)custom$".r
+  val desc = "custom"
+}
+
 case object UnknownSinkType extends SinkType {
   val idPattern = "".r
   val desc = "unknown"
diff --git a/measure/src/main/scala/org/apache/griffin/measure/context/DQContext.scala b/measure/src/main/scala/org/apache/griffin/measure/context/DQContext.scala
index b0759c5..2fdf409 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/context/DQContext.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/context/DQContext.scala
@@ -44,7 +44,7 @@
 
   val dataFrameCache: DataFrameCache = DataFrameCache()
 
-  val metricWrapper: MetricWrapper = MetricWrapper(name)
+  val metricWrapper: MetricWrapper = MetricWrapper(name, sparkSession.sparkContext.applicationId)
   val writeMode = WriteMode.defaultMode(procType)
 
   val dataSourceNames: Seq[String] = {
diff --git a/measure/src/main/scala/org/apache/griffin/measure/context/MetricWrapper.scala b/measure/src/main/scala/org/apache/griffin/measure/context/MetricWrapper.scala
index cec737f..484797a 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/context/MetricWrapper.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/context/MetricWrapper.scala
@@ -23,11 +23,12 @@
 /**
   * wrap metrics into one, each calculation produces one metric map
   */
-case class MetricWrapper(name: String) extends Serializable {
+case class MetricWrapper(name: String, applicationId: String) extends Serializable {
 
   val _Name = "name"
   val _Timestamp = "tmst"
   val _Value = "value"
+  val _Metadata = "metadata"
 
   val metrics: MutableMap[Long, Map[String, Any]] = MutableMap()
 
@@ -45,7 +46,8 @@
       (timestamp, Map[String, Any](
         (_Name -> name),
         (_Timestamp -> timestamp),
-        (_Value -> value)
+        (_Value -> value),
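+        // the metadata map carries the Spark applicationId of the run that produced this metric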
+        (_Metadata -> Map("applicationId" -> applicationId))
       ))
     }
   }
diff --git a/measure/src/main/scala/org/apache/griffin/measure/datasource/DataSource.scala b/measure/src/main/scala/org/apache/griffin/measure/datasource/DataSource.scala
index f2cd0ec..fd94e9d 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/datasource/DataSource.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/datasource/DataSource.scala
@@ -48,15 +48,21 @@
 
   def loadData(context: DQContext): TimeRange = {
     info(s"load data [${name}]")
-    val timestamp = context.contextId.timestamp
-    val (dfOpt, timeRange) = data(timestamp)
-    dfOpt match {
-      case Some(df) =>
-        context.runTimeTableRegister.registerTable(name, df)
-      case None =>
-        warn(s"load data source [${name}] fails")
+    try {
+      val timestamp = context.contextId.timestamp
+      val (dfOpt, timeRange) = data(timestamp)
+      dfOpt match {
+        case Some(df) =>
+          context.runTimeTableRegister.registerTable(name, df)
+        case None =>
+          warn(s"Data source [${name}] is null!")
+      }
+      timeRange
+    } catch {
+      case e: Throwable =>
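+        // log the failure and rethrow so the error surfaces to the caller instead of yielding an empty source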
+        error(s"load data source [${name}] fails")
+        throw e
     }
-    timeRange
   }
 
   private def data(timestamp: Long): (Option[DataFrame], TimeRange) = {
diff --git a/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/AvroBatchDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/AvroBatchDataConnector.scala
index 09c96d5..4587cca 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/AvroBatchDataConnector.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/AvroBatchDataConnector.scala
@@ -34,15 +34,15 @@
                                   timestampStorage: TimestampStorage
                                  ) extends BatchDataConnector {
 
-  val config = dcParam.getConfig
+  val config: Map[String, Any] = dcParam.getConfig
 
   val FilePath = "file.path"
   val FileName = "file.name"
 
-  val filePath = config.getString(FilePath, "")
-  val fileName = config.getString(FileName, "")
+  val filePath: String = config.getString(FilePath, "")
+  val fileName: String = config.getString(FileName, "")
 
-  val concreteFileFullPath = if (pathPrefix) s"${filePath}${fileName}" else fileName
+  val concreteFileFullPath: String = if (pathPrefix()) filePath else fileName
 
   private def pathPrefix(): Boolean = {
     filePath.nonEmpty
@@ -53,15 +53,12 @@
   }
 
   def data(ms: Long): (Option[DataFrame], TimeRange) = {
-    val dfOpt = try {
+    assert(fileExist(), s"Avro file $concreteFileFullPath does not exist!")
+    val dfOpt = {
       val df = sparkSession.read.format("com.databricks.spark.avro").load(concreteFileFullPath)
       val dfOpt = Some(df)
       val preDfOpt = preProcess(dfOpt, ms)
       preDfOpt
-    } catch {
-      case e: Throwable =>
-        error(s"load avro file ${concreteFileFullPath} fails", e)
-        None
     }
     val tmsts = readTmst(ms)
     (dfOpt, TimeRange(ms, tmsts))
diff --git a/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/HiveBatchDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/HiveBatchDataConnector.scala
index 91ab07d..a7926b2 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/HiveBatchDataConnector.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/HiveBatchDataConnector.scala
@@ -47,17 +47,13 @@
   val wheres = whereString.split(",").map(_.trim).filter(_.nonEmpty)
 
   def data(ms: Long): (Option[DataFrame], TimeRange) = {
-    val dfOpt = try {
+    val dfOpt = {
       val dtSql = dataSql
       info(dtSql)
       val df = sparkSession.sql(dtSql)
       val dfOpt = Some(df)
       val preDfOpt = preProcess(dfOpt, ms)
       preDfOpt
-    } catch {
-      case e: Throwable =>
-        error(s"load hive table ${concreteTableName} fails: ${e.getMessage}", e)
-        None
     }
     val tmsts = readTmst(ms)
     (dfOpt, TimeRange(ms, tmsts))
diff --git a/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/TextDirBatchDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/TextDirBatchDataConnector.scala
index e21335e..85b4774 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/TextDirBatchDataConnector.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/TextDirBatchDataConnector.scala
@@ -53,7 +53,8 @@
   }
 
   def data(ms: Long): (Option[DataFrame], TimeRange) = {
-    val dfOpt = try {
+    assert(dirExist(), s"Text dir ${dirPath} does not exist!")
+    val dfOpt = {
       val dataDirs = listSubDirs(dirPath :: Nil, dataDirDepth, readable)
       // touch done file for read dirs
       dataDirs.foreach(dir => touchDone(dir))
@@ -68,10 +69,6 @@
       } else {
         None
       }
-    } catch {
-      case e: Throwable =>
-        error(s"load text dir ${dirPath} fails: ${e.getMessage}", e)
-        None
     }
     val tmsts = readTmst(ms)
     (dfOpt, TimeRange(ms, tmsts))
diff --git a/measure/src/main/scala/org/apache/griffin/measure/launch/DQApp.scala b/measure/src/main/scala/org/apache/griffin/measure/launch/DQApp.scala
index 71ba89d..c57ebff 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/launch/DQApp.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/launch/DQApp.scala
@@ -20,10 +20,11 @@
 
 import scala.util.Try
 
+import org.apache.spark.sql.SparkSession
+
 import org.apache.griffin.measure.Loggable
 import org.apache.griffin.measure.configuration.dqdefinition.{DQConfig, EnvConfig, SinkParam}
 
-
 /**
   * dq application process
   */
@@ -32,6 +33,8 @@
   val envParam: EnvConfig
   val dqParam: DQConfig
 
+  implicit var sparkSession: SparkSession = _
+
   def init: Try[_]
 
   /**
diff --git a/measure/src/main/scala/org/apache/griffin/measure/launch/batch/BatchDQApp.scala b/measure/src/main/scala/org/apache/griffin/measure/launch/batch/BatchDQApp.scala
index 97bffdd..c05d043 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/launch/batch/BatchDQApp.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/launch/batch/BatchDQApp.scala
@@ -41,13 +41,10 @@
 
   val sparkParam = envParam.getSparkParam
   val metricName = dqParam.getName
-//  val dataSourceParams = dqParam.dataSources
-//  val dataSourceNames = dataSourceParams.map(_.name)
   val sinkParams = getSinkParams
 
   var sqlContext: SQLContext = _
-
-  implicit var sparkSession: SparkSession = _
+  var dqContext: DQContext = _
 
   def retryable: Boolean = false
 
@@ -57,7 +54,9 @@
     conf.setAll(sparkParam.getConfig)
     conf.set("spark.sql.crossJoin.enabled", "true")
     sparkSession = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()
+    val logLevel = getGriffinLogLevel()
     sparkSession.sparkContext.setLogLevel(sparkParam.getLogLevel)
+    griffinLogger.setLevel(logLevel)
     sqlContext = sparkSession.sqlContext
 
     // register udf
@@ -76,7 +75,7 @@
     dataSources.foreach(_.init)
 
     // create dq context
-    val dqContext: DQContext = DQContext(
+    dqContext = DQContext(
       contextId, metricName, dataSources, sinkParams, BatchProcessType
     )(sparkSession)
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/launch/streaming/StreamingDQApp.scala b/measure/src/main/scala/org/apache/griffin/measure/launch/streaming/StreamingDQApp.scala
index 0de8980..be32eba 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/launch/streaming/StreamingDQApp.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/launch/streaming/StreamingDQApp.scala
@@ -34,12 +34,12 @@
 import org.apache.griffin.measure.context.streaming.checkpoint.offset.OffsetCheckpointClient
 import org.apache.griffin.measure.context.streaming.metric.CacheResults
 import org.apache.griffin.measure.datasource.DataSourceFactory
+import org.apache.griffin.measure.job.DQJob
 import org.apache.griffin.measure.job.builder.DQJobBuilder
 import org.apache.griffin.measure.launch.DQApp
 import org.apache.griffin.measure.step.builder.udf.GriffinUDFAgent
 import org.apache.griffin.measure.utils.{HdfsUtil, TimeUtil}
 
-
 case class StreamingDQApp(allParam: GriffinConfig) extends DQApp {
 
   val envParam: EnvConfig = allParam.getEnvConfig
@@ -47,14 +47,10 @@
 
   val sparkParam = envParam.getSparkParam
   val metricName = dqParam.getName
-//  val dataSourceParams = dqParam.dataSources
-//  val dataSourceNames = dataSourceParams.map(_.name)
   val sinkParams = getSinkParams
 
   var sqlContext: SQLContext = _
 
-  implicit var sparkSession: SparkSession = _
-
   def retryable: Boolean = true
 
   def init: Try[_] = Try {
@@ -63,7 +59,9 @@
     conf.setAll(sparkParam.getConfig)
     conf.set("spark.sql.crossJoin.enabled", "true")
     sparkSession = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()
+    val logLevel = getGriffinLogLevel()
     sparkSession.sparkContext.setLogLevel(sparkParam.getLogLevel)
+    griffinLogger.setLevel(logLevel)
     sqlContext = sparkSession.sqlContext
 
     // clear checkpoint directory
@@ -168,6 +166,9 @@
     val lock = OffsetCheckpointClient.genLock("process")
     val appSink = globalContext.getSink()
 
+    var dqContext: DQContext = _
+    var dqJob: DQJob = _
+
     def run(): Unit = {
       val updateTimeDate = new Date()
       val updateTime = updateTimeDate.getTime
@@ -183,10 +184,10 @@
           val contextId = ContextId(startTime)
 
           // create dq context
-          val dqContext: DQContext = globalContext.cloneDQContext(contextId)
+          dqContext = globalContext.cloneDQContext(contextId)
 
           // build job
-          val dqJob = DQJobBuilder.buildDQJob(dqContext, evaluateRuleParam)
+          dqJob = DQJobBuilder.buildDQJob(dqContext, evaluateRuleParam)
 
           // dq job execute
           dqJob.execute(dqContext)
diff --git a/measure/src/main/scala/org/apache/griffin/measure/sink/ConsoleSink.scala b/measure/src/main/scala/org/apache/griffin/measure/sink/ConsoleSink.scala
index feebd91..20f4e13 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/sink/ConsoleSink.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/sink/ConsoleSink.scala
@@ -26,7 +26,10 @@
 /**
   * sink metric and record to console, for debug
   */
-case class ConsoleSink(config: Map[String, Any], metricName: String, timeStamp: Long) extends Sink {
+case class ConsoleSink(
+                        config: Map[String, Any],
+                        metricName: String,
+                        timeStamp: Long) extends Sink {
 
   val block: Boolean = true
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/sink/ElasticSearchSink.scala b/measure/src/main/scala/org/apache/griffin/measure/sink/ElasticSearchSink.scala
index 745f760..e78a6a8 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/sink/ElasticSearchSink.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/sink/ElasticSearchSink.scala
@@ -25,12 +25,14 @@
 import org.apache.griffin.measure.utils.{HttpUtil, JsonUtil, TimeUtil}
 import org.apache.griffin.measure.utils.ParamUtil._
 
-
 /**
   * sink metric and record through http request
   */
-case class ElasticSearchSink(config: Map[String, Any], metricName: String,
-                             timeStamp: Long, block: Boolean
+case class ElasticSearchSink(
+                              config: Map[String, Any],
+                              metricName: String,
+                              timeStamp: Long,
+                              block: Boolean
                             ) extends Sink {
 
   val Api = "api"
@@ -64,7 +66,7 @@
 
       def func(): (Long, Future[Boolean]) = {
         import scala.concurrent.ExecutionContext.Implicits.global
-        (timeStamp, Future(HttpUtil.httpRequest(api, method, params, header, data)))
+        (timeStamp, Future(HttpUtil.doHttpRequest(api, method, params, header, data)))
       }
       if (block) SinkTaskRunner.addBlockTask(func _, retry, connectionTimeout)
       else SinkTaskRunner.addNonBlockTask(func _, retry)
diff --git a/measure/src/main/scala/org/apache/griffin/measure/sink/HdfsSink.scala b/measure/src/main/scala/org/apache/griffin/measure/sink/HdfsSink.scala
index d103b32..23fb48e 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/sink/HdfsSink.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/sink/HdfsSink.scala
@@ -28,15 +28,18 @@
 /**
   * sink metric and record to hdfs
   */
-case class HdfsSink(config: Map[String, Any], metricName: String, timeStamp: Long) extends Sink {
+case class HdfsSink(
+                     config: Map[String, Any],
+                     metricName: String,
+                     timeStamp: Long) extends Sink {
 
   val block: Boolean = true
 
-  val Path = "path"
+  val PathKey = "path"
   val MaxPersistLines = "max.persist.lines"
   val MaxLinesPerFile = "max.lines.per.file"
 
-  val path = config.getOrElse(Path, "").toString
+  val parentPath = config.getOrElse(PathKey, "").toString
   val maxPersistLines = config.getInt(MaxPersistLines, -1)
   val maxLinesPerFile = math.min(config.getInt(MaxLinesPerFile, 10000), 1000000)
 
@@ -49,7 +52,7 @@
   var _init = true
 
   def available(): Boolean = {
-    path.nonEmpty
+    parentPath.nonEmpty
   }
 
   private def logHead: String = {
@@ -70,7 +73,7 @@
   }
 
   protected def filePath(file: String): String = {
-    HdfsUtil.getHdfsFilePath(path, s"${metricName}/${timeStamp}/${file}")
+    HdfsUtil.getHdfsFilePath(parentPath, s"${metricName}/${timeStamp}/${file}")
   }
 
   protected def withSuffix(path: String, suffix: String): String = {
diff --git a/measure/src/main/scala/org/apache/griffin/measure/sink/MongoSink.scala b/measure/src/main/scala/org/apache/griffin/measure/sink/MongoSink.scala
index c090201..0885762 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/sink/MongoSink.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/sink/MongoSink.scala
@@ -32,9 +32,11 @@
 /**
   * sink metric and record to mongo
   */
-case class MongoSink(config: Map[String, Any], metricName: String,
-                     timeStamp: Long, block: Boolean
-                       ) extends Sink {
+case class MongoSink(
+                      config: Map[String, Any],
+                      metricName: String,
+                      timeStamp: Long,
+                      block: Boolean) extends Sink {
 
   MongoConnection.init(config)
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/sink/MultiSinks.scala b/measure/src/main/scala/org/apache/griffin/measure/sink/MultiSinks.scala
index b9f72da..3382546 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/sink/MultiSinks.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/sink/MultiSinks.scala
@@ -23,11 +23,11 @@
 /**
   * sink metric and record in multiple ways
   */
-case class MultiSinks(sinks: Iterable[Sink]) extends Sink {
+case class MultiSinks(sinkIter: Iterable[Sink]) extends Sink {
 
   val block: Boolean = false
 
-  val headSinkOpt: Option[Sink] = sinks.headOption
+  val headSinkOpt: Option[Sink] = sinkIter.headOption
 
   val metricName: String = headSinkOpt.map(_.metricName).getOrElse("")
 
@@ -35,13 +35,20 @@
 
   val config: Map[String, Any] = Map[String, Any]()
 
-  def available(): Boolean = { sinks.exists(_.available()) }
+  def available(): Boolean = {
+    sinkIter.exists(_.available())
+  }
 
-  def start(msg: String): Unit = { sinks.foreach(_.start(msg)) }
-  def finish(): Unit = { sinks.foreach(_.finish()) }
+  def start(msg: String): Unit = {
+    sinkIter.foreach(_.start(msg))
+  }
+
+  def finish(): Unit = {
+    sinkIter.foreach(_.finish())
+  }
 
   def log(rt: Long, msg: String): Unit = {
-    sinks.foreach { sink =>
+    sinkIter.foreach { sink =>
       try {
         sink.log(rt, msg)
       } catch {
@@ -51,7 +58,7 @@
   }
 
   def sinkRecords(records: RDD[String], name: String): Unit = {
-    sinks.foreach { sink =>
+    sinkIter.foreach { sink =>
       try {
         sink.sinkRecords(records, name)
       } catch {
@@ -59,8 +66,9 @@
       }
     }
   }
+
   def sinkRecords(records: Iterable[String], name: String): Unit = {
-    sinks.foreach { sink =>
+    sinkIter.foreach { sink =>
       try {
         sink.sinkRecords(records, name)
       } catch {
@@ -68,8 +76,9 @@
       }
     }
   }
+
   def sinkMetrics(metrics: Map[String, Any]): Unit = {
-    sinks.foreach { sink =>
+    sinkIter.foreach { sink =>
       try {
         sink.sinkMetrics(metrics)
       } catch {
diff --git a/measure/src/main/scala/org/apache/griffin/measure/sink/SinkContext.scala b/measure/src/main/scala/org/apache/griffin/measure/sink/SinkContext.scala
new file mode 100644
index 0000000..50d9f60
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/sink/SinkContext.scala
@@ -0,0 +1,21 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.sink
+
+case class SinkContext(config: Map[String, Any], metricName: String, timeStamp: Long, block: Boolean)
diff --git a/measure/src/main/scala/org/apache/griffin/measure/sink/SinkFactory.scala b/measure/src/main/scala/org/apache/griffin/measure/sink/SinkFactory.scala
index 49818f2..7b8bd31 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/sink/SinkFactory.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/sink/SinkFactory.scala
@@ -22,19 +22,20 @@
 
 import org.apache.griffin.measure.configuration.dqdefinition.SinkParam
 import org.apache.griffin.measure.configuration.enums._
+import org.apache.griffin.measure.utils.ParamUtil._
 
-
-
-case class SinkFactory(sinkParams: Iterable[SinkParam], metricName: String) extends Serializable {
+case class SinkFactory(sinkParamIter: Iterable[SinkParam],
+                       metricName: String) extends Serializable {
 
   /**
     * create sink
-    * @param timeStamp    the timestamp of sink
-    * @param block        sink write metric in block or non-block way
-    * @return   sink
+    *
+    * @param timeStamp the timestamp of sink
+    * @param block     sink write metric in block or non-block way
+    * @return sink
     */
   def getSinks(timeStamp: Long, block: Boolean): MultiSinks = {
-    MultiSinks(sinkParams.flatMap(param => getSink(timeStamp, param, block)))
+    MultiSinks(sinkParamIter.flatMap(param => getSink(timeStamp, param, block)))
   }
 
   private def getSink(timeStamp: Long, sinkParam: SinkParam, block: Boolean): Option[Sink] = {
@@ -45,6 +46,7 @@
       case HdfsSinkType => Try(HdfsSink(config, metricName, timeStamp))
       case ElasticsearchSinkType => Try(ElasticSearchSink(config, metricName, timeStamp, block))
       case MongoSinkType => Try(MongoSink(config, metricName, timeStamp, block))
+      case CustomSinkType => Try(getCustomSink(config, metricName, timeStamp, block))
       case _ => throw new Exception(s"sink type ${sinkType} is not supported!")
     }
     sinkTry match {
@@ -53,4 +55,34 @@
     }
   }
 
+  /**
+    * Using custom sink
+    *
+    * How it might look in env.json:
+    *
+    * "sinks": [
+    *   {
+    *     "type": "CUSTOM",
+    *     "config": {
+    *       "class": "com.yourcompany.griffin.sinks.MySuperSink",
+    *       "path": "/Users/Shared"
+    *     }
+    *   }
+    * ]
+    */
+  private def getCustomSink(config: Map[String, Any],
+                            metricName: String,
+                            timeStamp: Long,
+                            block: Boolean): Sink = {
+    val className = config.getString("class", "")
+    val cls = Class.forName(className)
+    if (classOf[Sink].isAssignableFrom(cls)) {
+      val ctx = SinkContext(config, metricName, timeStamp, block)
+      val method = cls.getDeclaredMethod("apply", classOf[SinkContext])
+      method.invoke(null, ctx).asInstanceOf[Sink]
+    } else {
+      throw new ClassCastException(s"$className should extend Sink")
+    }
+  }
+
 }
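
The new CustomSinkType branch above resolves the configured class by name and reflectively invokes an apply(SinkContext) method on it, so a user-supplied sink only needs a companion object with that signature. Below is a minimal sketch of such a sink, not part of this patch: the class name is the hypothetical one from the comment above, the members mirror the Sink trait as used by MultiSinks, and it assumes the Scala compiler emits the usual static forwarder for the companion's apply (which is what getDeclaredMethod finds on the class).

package com.yourcompany.griffin.sinks

import org.apache.spark.rdd.RDD

import org.apache.griffin.measure.sink.{Sink, SinkContext}

// Hypothetical custom sink; bodies are no-ops, a real sink would deliver to its target system.
case class MySuperSink(config: Map[String, Any],
                       metricName: String,
                       timeStamp: Long,
                       block: Boolean) extends Sink {

  def available(): Boolean = true

  def start(msg: String): Unit = {}
  def finish(): Unit = {}

  def log(rt: Long, msg: String): Unit = {}

  def sinkRecords(records: RDD[String], name: String): Unit = {}
  def sinkRecords(records: Iterable[String], name: String): Unit = {}

  def sinkMetrics(metrics: Map[String, Any]): Unit = {}
}

object MySuperSink {
  // SinkFactory.getCustomSink looks this up via getDeclaredMethod("apply", classOf[SinkContext]).
  def apply(ctx: SinkContext): MySuperSink =
    MySuperSink(ctx.config, ctx.metricName, ctx.timeStamp, ctx.block)
}
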
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/DQStep.scala b/measure/src/main/scala/org/apache/griffin/measure/step/DQStep.scala
index 60c8477..6a50ebb 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/DQStep.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/DQStep.scala
@@ -33,3 +33,12 @@
   def getNames(): Seq[String] = name :: Nil
 
 }
+
+object DQStepStatus extends Enumeration {
+  val PENDING = Value
+  val RUNNING = Value
+  val COMPLETE = Value
+  val FAILED = Value
+}
+
+
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/builder/DataFrameOpsDQStepBuilder.scala b/measure/src/main/scala/org/apache/griffin/measure/step/builder/DataFrameOpsDQStepBuilder.scala
index 796c797..743b05d 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/builder/DataFrameOpsDQStepBuilder.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/builder/DataFrameOpsDQStepBuilder.scala
@@ -29,7 +29,7 @@
     val name = getStepName(ruleParam.getOutDfName())
     val inputDfName = getStepName(ruleParam.getInDfName())
     val transformStep = DataFrameOpsTransformStep(
-      name, inputDfName, ruleParam.getRule, ruleParam.getDetails, ruleParam.getCache)
+      name, inputDfName, ruleParam.getRule, ruleParam.getDetails, None, ruleParam.getCache)
     transformStep +: buildDirectWriteSteps(ruleParam)
   }
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/builder/SparkSqlDQStepBuilder.scala b/measure/src/main/scala/org/apache/griffin/measure/step/builder/SparkSqlDQStepBuilder.scala
index b5dfd0c..0fdf20a 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/builder/SparkSqlDQStepBuilder.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/builder/SparkSqlDQStepBuilder.scala
@@ -28,7 +28,7 @@
   def buildSteps(context: DQContext, ruleParam: RuleParam): Seq[DQStep] = {
     val name = getStepName(ruleParam.getOutDfName())
     val transformStep = SparkSqlTransformStep(
-      name, ruleParam.getRule, ruleParam.getDetails, ruleParam.getCache)
+      name, ruleParam.getRule, ruleParam.getDetails, None, ruleParam.getCache)
     transformStep +: buildDirectWriteSteps(ruleParam)
   }
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/AccuracyExpr2DQSteps.scala b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/AccuracyExpr2DQSteps.scala
index f7ff3ef..3bb5737 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/AccuracyExpr2DQSteps.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/AccuracyExpr2DQSteps.scala
@@ -81,25 +81,22 @@
         s"SELECT ${selClause} FROM `${sourceName}` " +
           s"LEFT JOIN `${targetName}` ON ${onClause} WHERE ${whereClause}"
       }
-      val missRecordsTransStep =
-        SparkSqlTransformStep(missRecordsTableName, missRecordsSql, emptyMap, true)
 
       val missRecordsWriteSteps = procType match {
         case BatchProcessType =>
           val rwName =
             ruleParam.getOutputOpt(RecordOutputType).
               flatMap(_.getNameOpt).getOrElse(missRecordsTableName)
-          RecordWriteStep(rwName, missRecordsTableName) :: Nil
-        case StreamingProcessType => Nil
-      }
-      val missRecordsUpdateWriteSteps = procType match {
-        case BatchProcessType => Nil
+          RecordWriteStep(rwName, missRecordsTableName)
         case StreamingProcessType =>
           val dsName =
             ruleParam.getOutputOpt(DscUpdateOutputType).flatMap(_.getNameOpt).getOrElse(sourceName)
-          DataSourceUpdateWriteStep(dsName, missRecordsTableName) :: Nil
+          DataSourceUpdateWriteStep(dsName, missRecordsTableName)
       }
 
+      val missRecordsTransStep =
+        SparkSqlTransformStep(missRecordsTableName, missRecordsSql, emptyMap, Some(missRecordsWriteSteps), true)
+
       // 2. miss count
       val missCountTableName = "__missCount"
       val missColName = details.getStringOrKey(_miss)
@@ -111,6 +108,7 @@
             s"FROM `${missRecordsTableName}` GROUP BY `${ConstantColumns.tmst}`"
       }
       val missCountTransStep = SparkSqlTransformStep(missCountTableName, missCountSql, emptyMap)
+      missCountTransStep.parentSteps += missRecordsTransStep
 
       // 3. total count
       val totalCountTableName = "__totalCount"
@@ -150,24 +148,24 @@
              |ON `${totalCountTableName}`.`${ConstantColumns.tmst}` = `${missCountTableName}`.`${ConstantColumns.tmst}`
          """.stripMargin
       }
-      val accuracyTransStep = SparkSqlTransformStep(accuracyTableName, accuracyMetricSql, emptyMap)
-      val accuracyMetricWriteSteps = procType match {
+
+      val accuracyMetricWriteStep = procType match {
         case BatchProcessType =>
           val metricOpt = ruleParam.getOutputOpt(MetricOutputType)
           val mwName = metricOpt.flatMap(_.getNameOpt).getOrElse(ruleParam.getOutDfName())
           val flattenType = metricOpt.map(_.getFlatten).getOrElse(FlattenType.default)
-          MetricWriteStep(mwName, accuracyTableName, flattenType) :: Nil
-        case StreamingProcessType => Nil
+          Some(MetricWriteStep(mwName, accuracyTableName, flattenType))
+        case StreamingProcessType => None
       }
 
-      // accuracy current steps
-      val transSteps1 = missRecordsTransStep :: missCountTransStep :: totalCountTransStep :: accuracyTransStep :: Nil
-      val writeSteps1 =
-        accuracyMetricWriteSteps ++ missRecordsWriteSteps ++ missRecordsUpdateWriteSteps
+      val accuracyTransStep =
+        SparkSqlTransformStep(accuracyTableName, accuracyMetricSql, emptyMap, accuracyMetricWriteStep)
+      accuracyTransStep.parentSteps += missCountTransStep
+      accuracyTransStep.parentSteps += totalCountTransStep
 
-      // streaming extra steps
-      val (transSteps2, writeSteps2) = procType match {
-        case BatchProcessType => (Nil, Nil)
+      procType match {
+        case BatchProcessType => accuracyTransStep :: Nil
+        // streaming extra steps
         case StreamingProcessType =>
           // 5. accuracy metric merge
           val accuracyMetricTableName = "__accuracy"
@@ -177,14 +175,16 @@
             (AccuracyOprKeys._total -> totalColName),
             (AccuracyOprKeys._matched -> matchedColName)
           )
-          val accuracyMetricTransStep = DataFrameOpsTransformStep(accuracyMetricTableName,
-            accuracyTableName, accuracyMetricRule, accuracyMetricDetails)
           val accuracyMetricWriteStep = {
             val metricOpt = ruleParam.getOutputOpt(MetricOutputType)
             val mwName = metricOpt.flatMap(_.getNameOpt).getOrElse(ruleParam.getOutDfName())
             val flattenType = metricOpt.map(_.getFlatten).getOrElse(FlattenType.default)
             MetricWriteStep(mwName, accuracyMetricTableName, flattenType)
           }
+          val accuracyMetricTransStep = DataFrameOpsTransformStep(accuracyMetricTableName,
+            accuracyTableName, accuracyMetricRule, accuracyMetricDetails, Some(accuracyMetricWriteStep))
+          accuracyMetricTransStep.parentSteps += accuracyTransStep
+
 
           // 6. collect accuracy records
           val accuracyRecordTableName = "__accuracyRecords"
@@ -194,8 +194,7 @@
                |FROM `${accuracyMetricTableName}` WHERE `${ConstantColumns.record}`
              """.stripMargin
           }
-          val accuracyRecordTransStep = SparkSqlTransformStep(
-            accuracyRecordTableName, accuracyRecordSql, emptyMap)
+
           val accuracyRecordWriteStep = {
             val rwName =
               ruleParam.getOutputOpt(RecordOutputType).flatMap(_.getNameOpt)
@@ -203,14 +202,12 @@
 
             RecordWriteStep(rwName, missRecordsTableName, Some(accuracyRecordTableName))
           }
+          val accuracyRecordTransStep = SparkSqlTransformStep(
+            accuracyRecordTableName, accuracyRecordSql, emptyMap, Some(accuracyRecordWriteStep))
+          accuracyRecordTransStep.parentSteps += accuracyMetricTransStep
 
-          // extra steps
-          (accuracyMetricTransStep :: accuracyRecordTransStep :: Nil,
-            accuracyMetricWriteStep :: accuracyRecordWriteStep :: Nil)
+          accuracyRecordTransStep :: Nil
       }
-
-      // full steps
-      transSteps1 ++ transSteps2 ++ writeSteps1 ++ writeSteps2
     }
   }
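
With the write steps attached to their transform steps and the dependencies wired through parentSteps, the flat transSteps/writeSteps concatenation disappears: the batch accuracy rule now returns a single root step. Based on the wiring visible in this hunk (parent ordering is a HashSet, so it is not deterministic), the dependency tree that the debugString helper added to TransformStep later in this diff would print looks roughly like:

__accuracy
|---__missCount
|   |---__missRecords
|---__totalCount

The batch RecordWriteStep rides on __missRecords and the MetricWriteStep on __accuracy, so executing the root step drives both transforms and writes.
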
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/CompletenessExpr2DQSteps.scala b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/CompletenessExpr2DQSteps.scala
index 87cfa86..7312f29 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/CompletenessExpr2DQSteps.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/CompletenessExpr2DQSteps.scala
@@ -81,7 +81,7 @@
         s"SELECT ${selClause} FROM `${sourceName}`"
       }
       val sourceAliasTransStep =
-        SparkSqlTransformStep(sourceAliasTableName, sourceAliasSql, emptyMap, true)
+        SparkSqlTransformStep(sourceAliasTableName, sourceAliasSql, emptyMap, None, true)
 
       // 2. incomplete record
       val incompleteRecordsTableName = "__incompleteRecords"
@@ -91,14 +91,17 @@
       val incompleteRecordsSql =
         s"SELECT * FROM `${sourceAliasTableName}` WHERE ${incompleteWhereClause}"
 
-      val incompleteRecordTransStep =
-        SparkSqlTransformStep(incompleteRecordsTableName, incompleteRecordsSql, emptyMap, true)
       val incompleteRecordWriteStep = {
         val rwName =
           ruleParam.getOutputOpt(RecordOutputType).flatMap(_.getNameOpt)
             .getOrElse(incompleteRecordsTableName)
         RecordWriteStep(rwName, incompleteRecordsTableName)
       }
+      val incompleteRecordTransStep =
+        SparkSqlTransformStep(incompleteRecordsTableName, incompleteRecordsSql, emptyMap,
+          Some(incompleteRecordWriteStep), true)
+      incompleteRecordTransStep.parentSteps += sourceAliasTransStep
+
 
       // 3. incomplete count
       val incompleteCountTableName = "__incompleteCount"
@@ -112,6 +115,7 @@
       }
       val incompleteCountTransStep =
         SparkSqlTransformStep(incompleteCountTableName, incompleteCountSql, emptyMap)
+      incompleteCountTransStep.parentSteps += incompleteRecordTransStep
 
       // 4. total count
       val totalCountTableName = "__totalCount"
@@ -124,6 +128,7 @@
             s"FROM `${sourceAliasTableName}` GROUP BY `${ConstantColumns.tmst}`"
       }
       val totalCountTransStep = SparkSqlTransformStep(totalCountTableName, totalCountSql, emptyMap)
+      totalCountTransStep.parentSteps += sourceAliasTransStep
 
       // 5. complete metric
       val completeTableName = ruleParam.getOutDfName()
@@ -146,25 +151,19 @@
              |ON `${totalCountTableName}`.`${ConstantColumns.tmst}` = `${incompleteCountTableName}`.`${ConstantColumns.tmst}`
          """.stripMargin
       }
-      val completeTransStep = SparkSqlTransformStep(completeTableName, completeMetricSql, emptyMap)
       val completeWriteStep = {
         val metricOpt = ruleParam.getOutputOpt(MetricOutputType)
         val mwName = metricOpt.flatMap(_.getNameOpt).getOrElse(completeTableName)
         val flattenType = metricOpt.map(_.getFlatten).getOrElse(FlattenType.default)
         MetricWriteStep(mwName, completeTableName, flattenType)
       }
+      val completeTransStep =
+        SparkSqlTransformStep(completeTableName, completeMetricSql, emptyMap, Some(completeWriteStep))
+      completeTransStep.parentSteps += incompleteCountTransStep
+      completeTransStep.parentSteps += totalCountTransStep
 
-      val transSteps = {
-        sourceAliasTransStep :: incompleteRecordTransStep ::
-          incompleteCountTransStep :: totalCountTransStep ::
-          completeTransStep :: Nil
-      }
-      val writeSteps = {
-        incompleteRecordWriteStep :: completeWriteStep :: Nil
-      }
-
-      // full steps
-      transSteps ++ writeSteps
+      val transSteps = completeTransStep :: Nil
+      transSteps
     }
   }
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/DistinctnessExpr2DQSteps.scala b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/DistinctnessExpr2DQSteps.scala
index 70fee6c..65460c3 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/DistinctnessExpr2DQSteps.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/DistinctnessExpr2DQSteps.scala
@@ -102,7 +102,7 @@
         s"SELECT ${selClause} FROM `${sourceName}`"
       }
       val sourceAliasTransStep =
-        SparkSqlTransformStep(sourceAliasTableName, sourceAliasSql, emptyMap, true)
+        SparkSqlTransformStep(sourceAliasTableName, sourceAliasSql, emptyMap, None, true)
 
       // 2. total metric
       val totalTableName = "__totalMetric"
@@ -110,10 +110,12 @@
       val totalSql = {
         s"SELECT COUNT(*) AS `${totalColName}` FROM `${sourceAliasTableName}`"
       }
-      val totalTransStep = SparkSqlTransformStep(totalTableName, totalSql, emptyMap)
       val totalMetricWriteStep = {
         MetricWriteStep(totalColName, totalTableName, EntriesFlattenType, writeTimestampOpt)
       }
+      val totalTransStep =
+        SparkSqlTransformStep(totalTableName, totalSql, emptyMap, Some(totalMetricWriteStep))
+      totalTransStep.parentSteps += sourceAliasTransStep
 
       // 3. group by self
       val selfGroupTableName = "__selfGroup"
@@ -127,12 +129,12 @@
           """.stripMargin
       }
       val selfGroupTransStep =
-        SparkSqlTransformStep(selfGroupTableName, selfGroupSql, emptyMap, true)
+        SparkSqlTransformStep(selfGroupTableName, selfGroupSql, emptyMap, None, true)
+      selfGroupTransStep.parentSteps += sourceAliasTransStep
 
-      val transSteps1 = sourceAliasTransStep :: totalTransStep :: selfGroupTransStep :: Nil
-      val writeSteps1 = totalMetricWriteStep :: Nil
+      val transSteps1 = totalTransStep :: selfGroupTransStep :: Nil
 
-      val ((transSteps2, writeSteps2), dupCountTableName) = procType match {
+      val (transSteps2, dupCountTableName) = procType match {
         case StreamingProcessType if (withOlderTable) =>
           // 4.0 update old data
           val targetDsUpdateWriteStep = DataSourceUpdateWriteStep(targetName, targetName)
@@ -163,6 +165,8 @@
             """.stripMargin
           }
           val joinedTransStep = SparkSqlTransformStep(joinedTableName, joinedSql, emptyMap)
+          joinedTransStep.parentSteps += selfGroupTransStep
+          joinedTransStep.parentSteps += olderAliasTransStep
 
           // 6. group by joined data
           val groupTableName = "__group"
@@ -176,6 +180,7 @@
              """.stripMargin
           }
           val groupTransStep = SparkSqlTransformStep(groupTableName, groupSql, emptyMap)
+          groupTransStep.parentSteps += joinedTransStep
 
           // 7. final duplicate count
           val finalDupCountTableName = "__finalDupCount"
@@ -203,13 +208,12 @@
              """.stripMargin
           }
           val finalDupCountTransStep =
-            SparkSqlTransformStep(finalDupCountTableName, finalDupCountSql, emptyMap, true)
+            SparkSqlTransformStep(finalDupCountTableName, finalDupCountSql, emptyMap, None, true)
+          finalDupCountTransStep.parentSteps += groupTransStep
 
-          ((olderAliasTransStep :: joinedTransStep
-            :: groupTransStep :: finalDupCountTransStep :: Nil,
-            targetDsUpdateWriteStep :: Nil), finalDupCountTableName)
+          (finalDupCountTransStep :: targetDsUpdateWriteStep :: Nil, finalDupCountTableName)
         case _ =>
-          ((Nil, Nil), selfGroupTableName)
+          (selfGroupTransStep :: Nil, selfGroupTableName)
       }
 
       // 8. distinct metric
@@ -221,16 +225,16 @@
            |FROM `${dupCountTableName}` WHERE `${ConstantColumns.distinct}`
          """.stripMargin
       }
-      val distTransStep = SparkSqlTransformStep(distTableName, distSql, emptyMap)
       val distMetricWriteStep = {
         MetricWriteStep(distColName, distTableName, EntriesFlattenType, writeTimestampOpt)
       }
+      val distTransStep =
+        SparkSqlTransformStep(distTableName, distSql, emptyMap, Some(distMetricWriteStep))
 
       val transSteps3 = distTransStep :: Nil
-      val writeSteps3 = distMetricWriteStep :: Nil
 
       val duplicationArrayName = details.getString(_duplicationArray, "")
-      val (transSteps4, writeSteps4) = if (duplicationArrayName.nonEmpty) {
+      val transSteps4 = if (duplicationArrayName.nonEmpty) {
         val recordEnable = details.getBoolean(_recordEnable, false)
         if (groupAliases.size > 0) {
           // with some group by requirement
@@ -262,6 +266,7 @@
                """.stripMargin
           }
           val rnTransStep = SparkSqlTransformStep(rnTableName, rnSql, emptyMap)
+          rnTransStep.parentSteps += informedTransStep
 
           // 11. recognize duplicate items
           val dupItemsTableName = "__dupItems"
@@ -271,11 +276,23 @@
                |WHERE NOT `${ConstantColumns.distinct}` OR `${ConstantColumns.rowNumber}` > 1
                """.stripMargin
           }
-          val dupItemsTransStep = SparkSqlTransformStep(dupItemsTableName, dupItemsSql, emptyMap)
           val dupItemsWriteStep = {
             val rwName = ruleParam.getOutputOpt(RecordOutputType).flatMap(_.getNameOpt).getOrElse(dupItemsTableName)
             RecordWriteStep(rwName, dupItemsTableName, None, writeTimestampOpt)
           }
+          val dupItemsTransStep = {
+            if (recordEnable) {
+              SparkSqlTransformStep(
+                dupItemsTableName,
+                dupItemsSql,
+                emptyMap,
+                Some(dupItemsWriteStep)
+              )
+            } else {
+              SparkSqlTransformStep(dupItemsTableName, dupItemsSql, emptyMap)
+            }
+          }
+          dupItemsTransStep.parentSteps += rnTransStep
 
           // 12. group by dup Record metric
           val groupDupMetricTableName = "__groupDupMetric"
@@ -287,26 +304,22 @@
                |FROM `${dupItemsTableName}` GROUP BY ${groupSelClause}, `${dupColName}`
              """.stripMargin
           }
-          val groupDupMetricTransStep =
-            SparkSqlTransformStep(groupDupMetricTableName, groupDupMetricSql, emptyMap)
           val groupDupMetricWriteStep = {
             MetricWriteStep(duplicationArrayName,
               groupDupMetricTableName,
               ArrayFlattenType,
               writeTimestampOpt)
           }
+          val groupDupMetricTransStep =
+            SparkSqlTransformStep(
+              groupDupMetricTableName,
+              groupDupMetricSql,
+              emptyMap,
+              Some(groupDupMetricWriteStep)
+            )
+          groupDupMetricTransStep.parentSteps += dupItemsTransStep
 
-          val msteps = {
-            informedTransStep :: rnTransStep :: dupItemsTransStep :: groupDupMetricTransStep :: Nil
-          }
-          val wsteps = if (recordEnable) {
-            dupItemsWriteStep :: groupDupMetricWriteStep :: Nil
-          } else {
-            groupDupMetricWriteStep :: Nil
-          }
-
-          (msteps, wsteps)
-
+          groupDupMetricTransStep :: Nil
         } else {
           // no group by requirement
           // 9. duplicate record
@@ -323,16 +336,25 @@
                |FROM `${dupCountTableName}` WHERE `${dupColName}` > 0
               """.stripMargin
           }
-          val dupRecordTransStep =
-            SparkSqlTransformStep(dupRecordTableName, dupRecordSql, emptyMap, true)
-
           val dupRecordWriteStep = {
             val rwName =
               ruleParam.getOutputOpt(RecordOutputType).flatMap(_.getNameOpt)
                 .getOrElse(dupRecordTableName)
-
             RecordWriteStep(rwName, dupRecordTableName, None, writeTimestampOpt)
           }
+          val dupRecordTransStep = {
+            if (recordEnable) {
+              SparkSqlTransformStep(
+                dupRecordTableName,
+                dupRecordSql,
+                emptyMap,
+                Some(dupRecordWriteStep),
+                true
+              )
+            } else {
+              SparkSqlTransformStep(dupRecordTableName, dupRecordSql, emptyMap, None, true)
+            }
+          }
 
           // 10. duplicate metric
           val dupMetricTableName = "__dupMetric"
@@ -343,7 +365,6 @@
                |FROM `${dupRecordTableName}` GROUP BY `${dupColName}`
               """.stripMargin
           }
-          val dupMetricTransStep = SparkSqlTransformStep(dupMetricTableName, dupMetricSql, emptyMap)
           val dupMetricWriteStep = {
             MetricWriteStep(
               duplicationArrayName,
@@ -352,24 +373,21 @@
               writeTimestampOpt
             )
           }
+          val dupMetricTransStep =
+            SparkSqlTransformStep(
+              dupMetricTableName,
+              dupMetricSql,
+              emptyMap,
+              Some(dupMetricWriteStep)
+            )
+          dupMetricTransStep.parentSteps += dupRecordTransStep
 
-          val msteps = {
-            dupRecordTransStep :: dupMetricTransStep :: Nil
-          }
-          val wsteps = if (recordEnable) {
-            dupRecordWriteStep :: dupMetricWriteStep :: Nil
-          } else {
-            dupMetricWriteStep :: Nil
-          }
-
-          (msteps, wsteps)
+          dupMetricTransStep :: Nil
         }
-      } else (Nil, Nil)
+      } else Nil
 
       // full steps
-      transSteps1 ++ transSteps2 ++ transSteps3 ++ transSteps4 ++
-        writeSteps1 ++ writeSteps2 ++ writeSteps3 ++ writeSteps4
-
+      transSteps1 ++ transSteps2 ++ transSteps3 ++ transSteps4
     }
   }
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/Expr2DQSteps.scala b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/Expr2DQSteps.scala
index 492f4fd..e9a65b4 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/Expr2DQSteps.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/Expr2DQSteps.scala
@@ -21,9 +21,10 @@
 import org.apache.griffin.measure.Loggable
 import org.apache.griffin.measure.configuration.dqdefinition.RuleParam
 import org.apache.griffin.measure.configuration.enums._
-import org.apache.griffin.measure.context.{ContextId, DQContext, TimeRange}
+import org.apache.griffin.measure.context.DQContext
 import org.apache.griffin.measure.step.DQStep
 import org.apache.griffin.measure.step.builder.dsl.expr.Expr
+import org.apache.griffin.measure.step.write.{MetricWriteStep, RecordWriteStep, WriteStep}
 
 trait Expr2DQSteps extends Loggable with Serializable {
 
@@ -31,7 +32,6 @@
   protected val emptyMap = Map[String, Any]()
 
   def getDQSteps(): Seq[DQStep]
-
 }
 
 /**
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/ProfilingExpr2DQSteps.scala b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/ProfilingExpr2DQSteps.scala
index af493af..68ca2f4 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/ProfilingExpr2DQSteps.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/ProfilingExpr2DQSteps.scala
@@ -97,14 +97,15 @@
           s"${fromClause} ${preGroupbyClause} ${groupbyClause} ${postGroupbyClause}"
       }
       val profilingName = ruleParam.getOutDfName()
-      val profilingTransStep = SparkSqlTransformStep(profilingName, profilingSql, details)
       val profilingMetricWriteStep = {
         val metricOpt = ruleParam.getOutputOpt(MetricOutputType)
         val mwName = metricOpt.flatMap(_.getNameOpt).getOrElse(ruleParam.getOutDfName())
         val flattenType = metricOpt.map(_.getFlatten).getOrElse(FlattenType.default)
         MetricWriteStep(mwName, profilingName, flattenType)
       }
-      profilingTransStep :: profilingMetricWriteStep :: Nil
+      val profilingTransStep =
+        SparkSqlTransformStep(profilingName, profilingSql, details, Some(profilingMetricWriteStep))
+      profilingTransStep :: Nil
     }
   }
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/TimelinessExpr2DQSteps.scala b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/TimelinessExpr2DQSteps.scala
index 71eb452..5a3acfb 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/TimelinessExpr2DQSteps.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/TimelinessExpr2DQSteps.scala
@@ -104,7 +104,8 @@
         s"SELECT *, (`${etsColName}` - `${ConstantColumns.beginTs}`) AS `${latencyColName}` " +
           s"FROM `${inTimeTableName}`"
       }
-      val latencyTransStep = SparkSqlTransformStep(latencyTableName, latencySql, emptyMap, true)
+      val latencyTransStep = SparkSqlTransformStep(latencyTableName, latencySql, emptyMap, None, true)
+      latencyTransStep.parentSteps += inTimeTransStep
 
       // 3. timeliness metric
       val metricTableName = ruleParam.getOutDfName()
@@ -128,26 +129,26 @@
              |GROUP BY `${ConstantColumns.tmst}`
            """.stripMargin
       }
-      val metricTransStep = SparkSqlTransformStep(metricTableName, metricSql, emptyMap)
       val metricWriteStep = {
         val metricOpt = ruleParam.getOutputOpt(MetricOutputType)
         val mwName = metricOpt.flatMap(_.getNameOpt).getOrElse(ruleParam.getOutDfName())
         val flattenType = metricOpt.map(_.getFlatten).getOrElse(FlattenType.default)
         MetricWriteStep(mwName, metricTableName, flattenType)
       }
+      val metricTransStep =
+        SparkSqlTransformStep(metricTableName, metricSql, emptyMap, Some(metricWriteStep))
+      metricTransStep.parentSteps += latencyTransStep
 
       // current steps
-      val transSteps1 = inTimeTransStep :: latencyTransStep :: metricTransStep :: Nil
-      val writeSteps1 = metricWriteStep :: Nil
+      val transSteps1 = metricTransStep :: Nil
 
       // 4. timeliness record
-      val (transSteps2, writeSteps2) = TimeUtil.milliseconds(details.getString(_threshold, "")) match {
+      val transSteps2 = TimeUtil.milliseconds(details.getString(_threshold, "")) match {
         case Some(tsh) =>
           val recordTableName = "__lateRecords"
           val recordSql = {
             s"SELECT * FROM `${latencyTableName}` WHERE `${latencyColName}` > ${tsh}"
           }
-          val recordTransStep = SparkSqlTransformStep(recordTableName, recordSql, emptyMap)
           val recordWriteStep = {
             val rwName =
               ruleParam.getOutputOpt(RecordOutputType).flatMap(_.getNameOpt)
@@ -155,12 +156,16 @@
 
             RecordWriteStep(rwName, recordTableName, None)
           }
-          (recordTransStep :: Nil, recordWriteStep :: Nil)
-        case _ => (Nil, Nil)
+          val recordTransStep =
+            SparkSqlTransformStep(recordTableName, recordSql, emptyMap, Some(recordWriteStep))
+          recordTransStep.parentSteps += latencyTransStep
+
+          recordTransStep :: Nil
+        case _ => Nil
       }
 
       // 5. ranges
-      val (transSteps3, writeSteps3) = TimeUtil.milliseconds(details.getString(_stepSize, "")) match {
+      val transSteps3 = TimeUtil.milliseconds(details.getString(_stepSize, "")) match {
         case Some(stepSize) =>
           // 5.1 range
           val rangeTableName = "__range"
@@ -172,6 +177,7 @@
              """.stripMargin
           }
           val rangeTransStep = SparkSqlTransformStep(rangeTableName, rangeSql, emptyMap)
+          rangeTransStep.parentSteps += latencyTransStep
 
           // 5.2 range metric
           val rangeMetricTableName = "__rangeMetric"
@@ -188,19 +194,20 @@
                  |FROM `${rangeTableName}` GROUP BY `${ConstantColumns.tmst}`, `${stepColName}`
                 """.stripMargin
           }
-          val rangeMetricTransStep =
-            SparkSqlTransformStep(rangeMetricTableName, rangeMetricSql, emptyMap)
           val rangeMetricWriteStep = {
             MetricWriteStep(stepColName, rangeMetricTableName, ArrayFlattenType)
           }
+          val rangeMetricTransStep =
+            SparkSqlTransformStep(rangeMetricTableName, rangeMetricSql, emptyMap, Some(rangeMetricWriteStep))
+          rangeMetricTransStep.parentSteps += rangeTransStep
 
-          (rangeTransStep :: rangeMetricTransStep :: Nil, rangeMetricWriteStep :: Nil)
-        case _ => (Nil, Nil)
+          rangeMetricTransStep :: Nil
+        case _ => Nil
       }
 
       // 6. percentiles
       val percentiles = getPercentiles(details)
-      val (transSteps4, writeSteps4) = if (percentiles.size > 0) {
+      val transSteps4 = if (percentiles.size > 0) {
         val percentileTableName = "__percentile"
         val percentileColName = details.getStringOrKey(_percentileColPrefix)
         val percentileCols = percentiles.map { pct =>
@@ -214,19 +221,18 @@
              |FROM `${latencyTableName}`
             """.stripMargin
         }
-        val percentileTransStep =
-          SparkSqlTransformStep(percentileTableName, percentileSql, emptyMap)
-
         val percentileWriteStep = {
           MetricWriteStep(percentileTableName, percentileTableName, DefaultFlattenType)
         }
+        val percentileTransStep =
+          SparkSqlTransformStep(percentileTableName, percentileSql, emptyMap, Some(percentileWriteStep))
+        percentileTransStep.parentSteps += latencyTransStep
 
-        (percentileTransStep :: Nil, percentileWriteStep :: Nil)
-      } else (Nil, Nil)
+        percentileTransStep :: Nil
+      } else Nil
 
       // full steps
-      transSteps1 ++ transSteps2 ++ transSteps3 ++ transSteps4 ++
-        writeSteps1 ++ writeSteps2 ++ writeSteps3 ++ writeSteps4
+      transSteps1 ++ transSteps2 ++ transSteps3 ++ transSteps4
     }
   }
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/UniquenessExpr2DQSteps.scala b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/UniquenessExpr2DQSteps.scala
index 28e9d48..a19b35c 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/UniquenessExpr2DQSteps.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/builder/dsl/transform/UniquenessExpr2DQSteps.scala
@@ -64,7 +64,7 @@
       warn(s"[${timestamp}] data source ${sourceName} not exists")
       Nil
     } else if (!context.runTimeTableRegister.existsTable(targetName)) {
-      println(s"[${timestamp}] data source ${targetName} not exists")
+      warn(s"[${timestamp}] data source ${targetName} does not exist")
       Nil
     } else {
       val selItemsClause = analyzer.selectionPairs.map { pair =>
@@ -104,6 +104,8 @@
         s"SELECT ${joinedSelClause} FROM `${targetTableName}` RIGHT JOIN `${sourceTableName}` ON ${onClause}"
       }
       val joinedTransStep = SparkSqlTransformStep(joinedTableName, joinedSql, emptyMap)
+      joinedTransStep.parentSteps += sourceTransStep
+      joinedTransStep.parentSteps += targetTransStep
 
       // 4. group
       val groupTableName = "__group"
@@ -115,7 +117,8 @@
         s"SELECT ${groupSelClause}, (COUNT(*) - 1) AS `${dupColName}` " +
           s"FROM `${joinedTableName}` GROUP BY ${groupSelClause}"
       }
-      val groupTransStep = SparkSqlTransformStep(groupTableName, groupSql, emptyMap, true)
+      val groupTransStep = SparkSqlTransformStep(groupTableName, groupSql, emptyMap, None, true)
+      groupTransStep.parentSteps += joinedTransStep
 
       // 5. total metric
       val totalTableName = "__totalMetric"
@@ -128,8 +131,9 @@
              |FROM `${sourceName}` GROUP BY `${ConstantColumns.tmst}`
            """.stripMargin
       }
-      val totalTransStep = SparkSqlTransformStep(totalTableName, totalSql, emptyMap)
       val totalMetricWriteStep = MetricWriteStep(totalColName, totalTableName, EntriesFlattenType)
+      val totalTransStep =
+        SparkSqlTransformStep(totalTableName, totalSql, emptyMap, Some(totalMetricWriteStep))
 
       // 6. unique record
       val uniqueRecordTableName = "__uniqueRecord"
@@ -138,6 +142,7 @@
       }
       val uniqueRecordTransStep =
         SparkSqlTransformStep(uniqueRecordTableName, uniqueRecordSql, emptyMap)
+      uniqueRecordTransStep.parentSteps += groupTransStep
 
       // 7. unique metric
       val uniqueTableName = "__uniqueMetric"
@@ -151,24 +156,21 @@
              |FROM `${uniqueRecordTableName}` GROUP BY `${ConstantColumns.tmst}`
            """.stripMargin
       }
-      val uniqueTransStep = SparkSqlTransformStep(uniqueTableName, uniqueSql, emptyMap)
-
       val uniqueMetricWriteStep =
         MetricWriteStep(uniqueColName, uniqueTableName, EntriesFlattenType)
+      val uniqueTransStep =
+        SparkSqlTransformStep(uniqueTableName, uniqueSql, emptyMap, Some(uniqueMetricWriteStep))
+      uniqueTransStep.parentSteps += uniqueRecordTransStep
 
-      val transSteps1 = sourceTransStep :: targetTransStep :: joinedTransStep :: groupTransStep ::
-        totalTransStep :: uniqueRecordTransStep :: uniqueTransStep :: Nil
-      val writeSteps1 = totalMetricWriteStep :: uniqueMetricWriteStep :: Nil
+      val transSteps1 = totalTransStep :: uniqueTransStep :: Nil
 
       val duplicationArrayName = details.getString(_duplicationArray, "")
-      val (transSteps2, writeSteps2) = if (duplicationArrayName.nonEmpty) {
+      val transSteps2 = if (duplicationArrayName.nonEmpty) {
         // 8. duplicate record
         val dupRecordTableName = "__dupRecords"
         val dupRecordSql = {
           s"SELECT * FROM `${groupTableName}` WHERE `${dupColName}` > 0"
         }
-        val dupRecordTransStep =
-          SparkSqlTransformStep(dupRecordTableName, dupRecordSql, emptyMap, true)
 
         val dupRecordWriteStep = {
           val rwName =
@@ -177,6 +179,8 @@
 
           RecordWriteStep(rwName, dupRecordTableName)
         }
+        val dupRecordTransStep =
+          SparkSqlTransformStep(dupRecordTableName, dupRecordSql, emptyMap, Some(dupRecordWriteStep), true)
 
         // 9. duplicate metric
         val dupMetricTableName = "__dupMetric"
@@ -197,17 +201,22 @@
              |GROUP BY ${dupMetricGroupbyClause}
           """.stripMargin
         }
-        val dupMetricTransStep = SparkSqlTransformStep(dupMetricTableName, dupMetricSql, emptyMap)
         val dupMetricWriteStep = {
           MetricWriteStep(duplicationArrayName, dupMetricTableName, ArrayFlattenType)
         }
+        val dupMetricTransStep =
+          SparkSqlTransformStep(dupMetricTableName,
+            dupMetricSql,
+            emptyMap,
+            Some(dupMetricWriteStep)
+          )
+        dupMetricTransStep.parentSteps += dupRecordTransStep
 
-        (dupRecordTransStep :: dupMetricTransStep :: Nil,
-          dupRecordWriteStep :: dupMetricWriteStep :: Nil)
-      } else (Nil, Nil)
+        dupMetricTransStep :: Nil
+      } else Nil
 
       // full steps
-      transSteps1 ++ transSteps2 ++ writeSteps1 ++ writeSteps2
+      transSteps1 ++ transSteps2
     }
   }
 
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/transform/DataFrameOpsTransformStep.scala b/measure/src/main/scala/org/apache/griffin/measure/step/transform/DataFrameOpsTransformStep.scala
index 4ac35b2..c393706 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/transform/DataFrameOpsTransformStep.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/transform/DataFrameOpsTransformStep.scala
@@ -19,18 +19,20 @@
 package org.apache.griffin.measure.step.transform
 
 import org.apache.griffin.measure.context.DQContext
+import org.apache.griffin.measure.step.write.WriteStep
 
 /**
   * data frame ops transform step
   */
-case class DataFrameOpsTransformStep(name: String,
+case class DataFrameOpsTransformStep[T <: WriteStep](name: String,
                                      inputDfName: String,
                                      rule: String,
                                      details: Map[String, Any],
+                                     writeStepOpt: Option[T] = None,
                                      cache: Boolean = false
                                     ) extends TransformStep {
 
-  def execute(context: DQContext): Boolean = {
+  def doExecute(context: DQContext): Boolean = {
     val sqlContext = context.sqlContext
     try {
       val df = rule match {
@@ -43,7 +45,10 @@
       }
       if (cache) context.dataFrameCache.cacheDataFrame(name, df)
       context.runTimeTableRegister.registerTable(name, df)
-      true
+      writeStepOpt match {
+        case Some(writeStep) => writeStep.execute(context)
+        case None => true
+      }
     } catch {
       case e: Throwable =>
         error(s"run data frame ops [ ${rule} ] error: ${e.getMessage}", e)
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/transform/SparkSqlTransformStep.scala b/measure/src/main/scala/org/apache/griffin/measure/step/transform/SparkSqlTransformStep.scala
index 39b6a0e..00edf07 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/transform/SparkSqlTransformStep.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/transform/SparkSqlTransformStep.scala
@@ -19,23 +19,27 @@
 package org.apache.griffin.measure.step.transform
 
 import org.apache.griffin.measure.context.DQContext
+import org.apache.griffin.measure.step.write.WriteStep
 
 /**
   * spark sql transform step
   */
-case class SparkSqlTransformStep(name: String,
-                                 rule: String,
-                                 details: Map[String, Any],
-                                 cache: Boolean = false
-                                ) extends TransformStep {
-
-  def execute(context: DQContext): Boolean = {
+case class SparkSqlTransformStep[T <: WriteStep](name: String,
+                                                 rule: String,
+                                                 details: Map[String, Any],
+                                                 writeStepOpt: Option[T] = None,
+                                                 cache: Boolean = false
+                                                ) extends TransformStep {
+  def doExecute(context: DQContext): Boolean = {
     val sqlContext = context.sqlContext
     try {
       val df = sqlContext.sql(rule)
       if (cache) context.dataFrameCache.cacheDataFrame(name, df)
       context.runTimeTableRegister.registerTable(name, df)
-      true
+      writeStepOpt match {
+        case Some(writeStep) => writeStep.execute(context)
+        case None => true
+      }
     } catch {
       case e: Throwable =>
         error(s"run spark sql [ ${rule} ] error: ${e.getMessage}", e)
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/transform/TransformStep.scala b/measure/src/main/scala/org/apache/griffin/measure/step/transform/TransformStep.scala
index 995ce49..b3318ba 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/transform/TransformStep.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/transform/TransformStep.scala
@@ -18,7 +18,15 @@
 */
 package org.apache.griffin.measure.step.transform
 
+import scala.collection.mutable.HashSet
+import scala.concurrent.duration.Duration
+import scala.concurrent.ExecutionContext
+import scala.concurrent.Future
+
+import org.apache.griffin.measure.context.DQContext
 import org.apache.griffin.measure.step.DQStep
+import org.apache.griffin.measure.step.DQStepStatus._
+import org.apache.griffin.measure.utils.ThreadUtils
 
 trait TransformStep extends DQStep {
 
@@ -28,4 +36,75 @@
 
   val cache: Boolean
 
+  var status = PENDING
+
+  val parentSteps = new HashSet[TransformStep]
+
+  def doExecute(context: DQContext): Boolean
+
+  def execute(context: DQContext): Boolean = {
+    val threadName = Thread.currentThread().getName
+    info(threadName + " begin transform step : \n" + debugString())
+    // Submit parents Steps
+    val parentStepFutures = parentSteps.filter(checkAndUpdateStatus).map { parentStep =>
+      Future {
+        val result = parentStep.execute(context)
+        parentStep.synchronized {
+          if (result) {
+            parentStep.status = COMPLETE
+          } else {
+            parentStep.status = FAILED
+          }
+        }
+      }(TransformStep.transformStepContext)
+    }
+    ThreadUtils.awaitResult(
+      Future.sequence(parentStepFutures)(implicitly, TransformStep.transformStepContext),
+      Duration.Inf)
+
+    parentSteps.map(step => {
+      while (step.status == RUNNING) {
+        Thread.sleep(1000L)
+      }
+    })
+    val prepared = parentSteps.foldLeft(true)((ret, step) => ret && step.status == COMPLETE)
+    if (prepared) {
+      val res = doExecute(context)
+      info(threadName + " end transform step : \n" + debugString())
+      res
+    } else {
+      error("Parent transform step failed!")
+      false
+    }
+  }
+
+  def checkAndUpdateStatus(step: TransformStep): Boolean = {
+    step.synchronized {
+      if (step.status == PENDING) {
+        step.status = RUNNING
+        true
+      } else {
+        false
+      }
+    }
+  }
+
+  def debugString(level: Int = 0): String = {
+    val stringBuffer = new StringBuilder
+    if (level > 0) {
+      for (i <- 0 to level - 1) {
+        stringBuffer.append("|   ")
+      }
+      stringBuffer.append("|---")
+    }
+    stringBuffer.append(name + "\n")
+    parentSteps.foreach(parentStep => stringBuffer.append(parentStep.debugString(level + 1)))
+    stringBuffer.toString()
+  }
 }
+
+object TransformStep {
+  private[transform] val transformStepContext = ExecutionContext.fromExecutorService(
+    ThreadUtils.newDaemonCachedThreadPool("transform-step"))
+}
+
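With this change TransformStep.execute first submits its parentSteps as Futures on the shared "transform-step" daemon pool, then runs doExecute only after every parent has reached COMPLETE; checkAndUpdateStatus flips a step from PENDING to RUNNING exactly once, so a parent shared by several children is executed a single time. A minimal wiring sketch, assuming step1..step3 are any concrete TransformStep instances (TransformStepTest added below exercises a larger graph):

    step2.parentSteps += step1          // step2 depends on step1
    step3.parentSteps += step1          // step1 is shared, but its doExecute runs only once
    step3.parentSteps += step2
    step3.execute(context)              // step1 runs first, step2 follows, then step3's doExecute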
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
index d23dd46..ffb7e47 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
@@ -77,7 +77,6 @@
     out.close
   }
 
-
   def getHdfsFilePath(parentPath: String, fileName: String): String = {
     if (parentPath.endsWith(seprator)) parentPath + fileName else parentPath + seprator + fileName
   }
@@ -91,9 +90,10 @@
     }
   }
 
-
-  def listSubPathsByType(dirPath: String, subType: String, fullPath: Boolean = false)
-    : Iterable[String] = {
+  def listSubPathsByType(
+                          dirPath: String,
+                          subType: String,
+                          fullPath: Boolean = false) : Iterable[String] = {
     if (existPath(dirPath)) {
       try {
         implicit val path = new Path(dirPath)
@@ -116,8 +116,10 @@
     } else Nil
   }
 
-  def listSubPathsByTypes(dirPath: String, subTypes: Iterable[String], fullPath: Boolean = false)
-    : Iterable[String] = {
+  def listSubPathsByTypes(
+                           dirPath: String,
+                           subTypes: Iterable[String],
+                           fullPath: Boolean = false) : Iterable[String] = {
     subTypes.flatMap { subType =>
       listSubPathsByType(dirPath, subType, fullPath)
     }
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/HttpUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/HttpUtil.scala
index 4949642..e679c52 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/HttpUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/HttpUtil.scala
@@ -18,6 +18,9 @@
 */
 package org.apache.griffin.measure.utils
 
+import org.apache.http.client.methods.{HttpPost, HttpPut}
+import org.apache.http.entity.{ContentType, StringEntity}
+import org.apache.http.impl.client.HttpClientBuilder
 import scalaj.http._
 
 object HttpUtil {
@@ -37,6 +40,32 @@
     response.isSuccess
   }
 
+  def doHttpRequest(url: String,
+                  method: String,
+                  params: Map[String, Object],
+                  headers: Map[String, Object],
+                  data: String): Boolean = {
+    val client = HttpClientBuilder.create.build
+    method match {
+      case POST_REGEX() =>
+        val post = new HttpPost(url)
+        convertObjMap2StrMap(headers) foreach (header => post.addHeader(header._1, header._2))
+        post.setEntity(new StringEntity(data, ContentType.APPLICATION_JSON))
+
+        // send the post request
+        val response = client.execute(post)
+        val code = response.getStatusLine.getStatusCode
+        code >= 200 && code < 300
+      case PUT_REGEX() =>
+        val put = new HttpPut(url)
+        convertObjMap2StrMap(headers) foreach (header => put.addHeader(header._1, header._2))
+        put.setEntity(new StringEntity(data, ContentType.APPLICATION_JSON))
+
+        // send the put request
+        val response = client.execute(put)
+        val code = response.getStatusLine.getStatusCode
+        code >= 200 && code < 300
+      case _ => false
+    }
+  }
+
   def httpRequest(url: String,
                   method: String,
                   params: Map[String, Object],
@@ -58,5 +87,4 @@
   private def convertObjMap2StrMap(map: Map[String, Object]): Map[String, String] = {
     map.map(pair => pair._1 -> pair._2.toString)
   }
-
 }
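Unlike the existing scalaj-based httpRequest, doHttpRequest goes through Apache HttpClient and reports success as any 2xx status code; note that the params argument is accepted but not used by this method. A minimal call sketch, with an illustrative URL and payload only:

    val ok = HttpUtil.doHttpRequest(
      url = "http://localhost:9200/griffin/accuracy",
      method = "POST",
      params = Map.empty,
      headers = Map("Content-Type" -> "application/json"),
      data = """{"total": 50, "miss": 4}""")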
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/ThreadUtils.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/ThreadUtils.scala
new file mode 100644
index 0000000..d484ec9
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/ThreadUtils.scala
@@ -0,0 +1,227 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.utils
+
+import java.util.concurrent._
+
+import scala.concurrent.{Awaitable, ExecutionContext, ExecutionContextExecutor}
+import scala.concurrent.duration.Duration
+import scala.concurrent.forkjoin.{ForkJoinPool => SForkJoinPool, ForkJoinWorkerThread => SForkJoinWorkerThread}
+import scala.util.control.NonFatal
+
+import com.google.common.util.concurrent.{MoreExecutors, ThreadFactoryBuilder}
+
+private[griffin] object ThreadUtils {
+
+  private val sameThreadExecutionContext =
+    ExecutionContext.fromExecutorService(MoreExecutors.sameThreadExecutor())
+
+  /**
+   * An `ExecutionContextExecutor` that runs each task in the thread that invokes `execute/submit`.
+   * The caller should make sure the tasks running in this `ExecutionContextExecutor` are short and
+   * never block.
+   */
+  def sameThread: ExecutionContextExecutor = sameThreadExecutionContext
+
+  /**
+   * Create a thread factory that names threads with a prefix and also sets the threads to daemon.
+   */
+  def namedThreadFactory(prefix: String): ThreadFactory = {
+    new ThreadFactoryBuilder().setDaemon(true).setNameFormat(prefix + "-%d").build()
+  }
+
+  /**
+   * Wrapper over newCachedThreadPool. Thread names are formatted as prefix-ID, where ID is a
+   * unique, sequentially assigned integer.
+   */
+  def newDaemonCachedThreadPool(prefix: String): ThreadPoolExecutor = {
+    val threadFactory = namedThreadFactory(prefix)
+    Executors.newCachedThreadPool(threadFactory).asInstanceOf[ThreadPoolExecutor]
+  }
+
+  /**
+   * Create a cached thread pool whose max number of threads is `maxThreadNumber`. Thread names
+   * are formatted as prefix-ID, where ID is a unique, sequentially assigned integer.
+   */
+  def newDaemonCachedThreadPool(
+      prefix: String, maxThreadNumber: Int, keepAliveSeconds: Int = 60): ThreadPoolExecutor = {
+    val threadFactory = namedThreadFactory(prefix)
+    val threadPool = new ThreadPoolExecutor(
+      maxThreadNumber, // corePoolSize: the max number of threads to create before queuing the tasks
+      maxThreadNumber, // maximumPoolSize: because we use a LinkedBlockingQueue, this one is not used
+      keepAliveSeconds,
+      TimeUnit.SECONDS,
+      new LinkedBlockingQueue[Runnable],
+      threadFactory)
+    threadPool.allowCoreThreadTimeOut(true)
+    threadPool
+  }
+
+  /**
+   * Wrapper over newFixedThreadPool. Thread names are formatted as prefix-ID, where ID is a
+   * unique, sequentially assigned integer.
+   */
+  def newDaemonFixedThreadPool(nThreads: Int, prefix: String): ThreadPoolExecutor = {
+    val threadFactory = namedThreadFactory(prefix)
+    Executors.newFixedThreadPool(nThreads, threadFactory).asInstanceOf[ThreadPoolExecutor]
+  }
+
+  /**
+   * Wrapper over newSingleThreadExecutor.
+   */
+  def newDaemonSingleThreadExecutor(threadName: String): ExecutorService = {
+    val threadFactory = new ThreadFactoryBuilder().setDaemon(true).setNameFormat(threadName).build()
+    Executors.newSingleThreadExecutor(threadFactory)
+  }
+
+  /**
+   * Wrapper over ScheduledThreadPoolExecutor.
+   */
+  def newDaemonSingleThreadScheduledExecutor(threadName: String): ScheduledExecutorService = {
+    val threadFactory = new ThreadFactoryBuilder().setDaemon(true).setNameFormat(threadName).build()
+    val executor = new ScheduledThreadPoolExecutor(1, threadFactory)
+    // By default, a cancelled task is not automatically removed from the work queue until its delay
+    // elapses. We have to enable it manually.
+    executor.setRemoveOnCancelPolicy(true)
+    executor
+  }
+
+  /**
+   * Run a piece of code in a new thread and return the result. Exception in the new thread is
+   * thrown in the caller thread with an adjusted stack trace that removes references to this
+   * method for clarity. The exception stack traces will be like the following
+   *
+   * SomeException: exception-message
+   *   at CallerClass.body-method (sourcefile.scala)
+   *   at ... run in separate thread using org.apache.griffin.measure.utils.ThreadUtils ... ()
+   *   at CallerClass.caller-method (sourcefile.scala)
+   *   ...
+   */
+  def runInNewThread[T](
+      threadName: String,
+      isDaemon: Boolean = true)(body: => T): T = {
+    @volatile var exception: Option[Throwable] = None
+    @volatile var result: T = null.asInstanceOf[T]
+
+    val thread = new Thread(threadName) {
+      override def run(): Unit = {
+        try {
+          result = body
+        } catch {
+          case NonFatal(e) =>
+            exception = Some(e)
+        }
+      }
+    }
+    thread.setDaemon(isDaemon)
+    thread.start()
+    thread.join()
+
+    exception match {
+      case Some(realException) =>
+        // Remove the part of the stack that shows method calls into this helper method
+        // This means drop everything from the top until the stack element
+        // ThreadUtils.runInNewThread(), and then drop that as well (hence the `drop(1)`).
+        val baseStackTrace = Thread.currentThread().getStackTrace().dropWhile(
+          ! _.getClassName.contains(this.getClass.getSimpleName)).drop(1)
+
+        // Remove the part of the new thread stack that shows methods call from this helper method
+        val extraStackTrace = realException.getStackTrace.takeWhile(
+          ! _.getClassName.contains(this.getClass.getSimpleName))
+
+        // Combine the two stack traces, with a place holder just specifying that there
+        // was a helper method used, without any further details of the helper
+        val placeHolderStackElem = new StackTraceElement(
+          s"... run in separate thread using ${ThreadUtils.getClass.getName.stripSuffix("$")} ..",
+          " ", "", -1)
+        val finalStackTrace = extraStackTrace ++ Seq(placeHolderStackElem) ++ baseStackTrace
+
+        // Update the stack trace and rethrow the exception in the caller thread
+        realException.setStackTrace(finalStackTrace)
+        throw realException
+      case None =>
+        result
+    }
+  }
+
+  /**
+   * Construct a new Scala ForkJoinPool with a specified max parallelism and name prefix.
+   */
+  def newForkJoinPool(prefix: String, maxThreadNumber: Int): SForkJoinPool = {
+    // Custom factory to set thread names
+    val factory = new SForkJoinPool.ForkJoinWorkerThreadFactory {
+      override def newThread(pool: SForkJoinPool) =
+        new SForkJoinWorkerThread(pool) {
+          setName(prefix + "-" + super.getName)
+        }
+    }
+    new SForkJoinPool(maxThreadNumber, factory,
+      null, // handler
+      false // asyncMode
+    )
+  }
+
+  // scalastyle:off awaitresult
+  /**
+   * Preferred alternative to `Await.result()`.
+   *
+   * This method wraps and re-throws any exceptions thrown by the underlying `Await` call, ensuring
+   * that this thread's stack trace appears in logs.
+   *
+   * In addition, it calls `Awaitable.result` directly to avoid using `ForkJoinPool`'s
+   * `BlockingContext`. Code running in the user's thread may be in a thread of a Scala ForkJoinPool.
+   * As concurrent executions in ForkJoinPool may see some [[ThreadLocal]] value unexpectedly, this
+   * method basically prevents ForkJoinPool from running other tasks in the current waiting thread.
+   * In general, we should use this method because it's hard to debug when [[ThreadLocal]]s leak
+   * to other tasks.
+   */
+  @throws(classOf[Exception])
+  def awaitResult[T](awaitable: Awaitable[T], atMost: Duration): T = {
+    try {
+      // `awaitPermission` is not actually used anywhere so it's safe to pass in null here.
+      val awaitPermission = null.asInstanceOf[scala.concurrent.CanAwait]
+      awaitable.result(atMost)(awaitPermission)
+    } catch {
+      // TimeoutException is thrown in the current thread, so there is no need to wrap the exception.
+      case NonFatal(t) if !t.isInstanceOf[TimeoutException] =>
+        throw new Exception("Exception thrown in awaitResult: ", t)
+    }
+  }
+  // scalastyle:on awaitresult
+
+  // scalastyle:off awaitready
+  /**
+   * Preferred alternative to `Await.ready()`.
+   *
+   * @see [[awaitResult]]
+   */
+  @throws(classOf[Exception])
+  def awaitReady[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type = {
+    try {
+      // `awaitPermission` is not actually used anywhere so it's safe to pass in null here.
+      val awaitPermission = null.asInstanceOf[scala.concurrent.CanAwait]
+      awaitable.ready(atMost)(awaitPermission)
+    } catch {
+      // TimeoutException is thrown in the current thread, so there is no need to wrap the exception.
+      case NonFatal(t) if !t.isInstanceOf[TimeoutException] =>
+        throw new Exception("Exception thrown in awaitReady: ", t)
+    }
+  }
+  // scalastyle:on awaitready
+}
\ No newline at end of file
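ThreadUtils is adapted from Spark's utility of the same name; TransformStep above builds its shared "transform-step" pool with newDaemonCachedThreadPool and blocks on results with awaitResult. A minimal usage sketch, not part of the patch:

    import scala.concurrent.{ExecutionContext, Future}
    import scala.concurrent.duration.Duration

    // run work on a named daemon pool and wait for it, with a caller-side stack trace on failure
    val pool = ThreadUtils.newDaemonCachedThreadPool("example")
    implicit val ec: ExecutionContext = ExecutionContext.fromExecutorService(pool)
    val sum = ThreadUtils.awaitResult(Future((1 to 10).sum), Duration.Inf)   // 55
    pool.shutdown()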
diff --git a/measure/src/test/resources/users_info_src/users_info_src.avro b/measure/src/test/resources/users_info_src/users_info_src.avro
new file mode 100644
index 0000000..3d5c939
--- /dev/null
+++ b/measure/src/test/resources/users_info_src/users_info_src.avro
Binary files differ
diff --git a/measure/src/test/resources/users_info_target/users_info_target.avro b/measure/src/test/resources/users_info_target/users_info_target.avro
new file mode 100644
index 0000000..104dd6c
--- /dev/null
+++ b/measure/src/test/resources/users_info_target/users_info_target.avro
Binary files differ
diff --git a/measure/src/test/scala/org/apache/griffin/measure/context/MetricWrapperTest.scala b/measure/src/test/scala/org/apache/griffin/measure/context/MetricWrapperTest.scala
index c835611..4a49c75 100644
--- a/measure/src/test/scala/org/apache/griffin/measure/context/MetricWrapperTest.scala
+++ b/measure/src/test/scala/org/apache/griffin/measure/context/MetricWrapperTest.scala
@@ -23,19 +23,21 @@
 class MetricWrapperTest extends FlatSpec with Matchers {
 
   "metric wrapper" should "flush empty if no metric inserted" in {
-    val metricWrapper = MetricWrapper("name")
+    val metricWrapper = MetricWrapper("name", "appId")
     metricWrapper.flush should be (Map[Long, Map[String, Any]]())
   }
 
   it should "flush all metrics inserted" in {
-    val metricWrapper = MetricWrapper("test")
+    val metricWrapper = MetricWrapper("test", "appId")
     metricWrapper.insertMetric(1, Map("total" -> 10, "miss"-> 2))
     metricWrapper.insertMetric(1, Map("match" -> 8))
     metricWrapper.insertMetric(2, Map("total" -> 20))
     metricWrapper.insertMetric(2, Map("miss" -> 4))
     metricWrapper.flush should be (Map(
-      1L -> Map("name" -> "test", "tmst" -> 1, "value" -> Map("total" -> 10, "miss"-> 2, "match" -> 8)),
-      2L -> Map("name" -> "test", "tmst" -> 2, "value" -> Map("total" -> 20, "miss"-> 4))
+      1L -> Map("name" -> "test", "tmst" -> 1, "value" -> Map("total" -> 10, "miss"-> 2, "match" -> 8),
+        "metadata" -> Map("applicationId" -> "appId")),
+      2L -> Map("name" -> "test", "tmst" -> 2, "value" -> Map("total" -> 20, "miss"-> 4),
+        "metadata" -> Map("applicationId" -> "appId"))
     ))
   }
 
diff --git a/measure/src/test/scala/org/apache/griffin/measure/job/BatchDQAppTest.scala b/measure/src/test/scala/org/apache/griffin/measure/job/BatchDQAppTest.scala
new file mode 100644
index 0000000..053001a
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/job/BatchDQAppTest.scala
@@ -0,0 +1,155 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.job
+
+import scala.util.{Failure, Success, Try}
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.SparkSession
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.griffin.measure.Application.readParamFile
+import org.apache.griffin.measure.configuration.dqdefinition.EnvConfig
+import org.apache.griffin.measure.launch.batch.BatchDQApp
+import org.apache.griffin.measure.step.builder.udf.GriffinUDFAgent
+
+class BatchDQAppTest extends DQAppTest with BeforeAndAfterAll {
+
+  override def beforeAll(): Unit = {
+    super.beforeAll()
+
+    envParam = readParamFile[EnvConfig](getConfigFilePath("/env-batch.json")) match {
+      case Success(p) => p
+      case Failure(ex) =>
+        error(ex.getMessage, ex)
+        sys.exit(-2)
+    }
+
+    sparkParam = envParam.getSparkParam
+
+    Try {
+      // build spark 2.0+ application context
+      var conf = new SparkConf().setAppName("BatchDQApp Test")
+      conf.setAll(sparkParam.getConfig)
+      conf.set("spark.sql.crossJoin.enabled", "true")
+
+      sparkSession = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()
+      val logLevel = getGriffinLogLevel()
+      sparkSession.sparkContext.setLogLevel(sparkParam.getLogLevel)
+      griffinLogger.setLevel(logLevel)
+      val sqlContext = sparkSession.sqlContext
+
+      // register udf
+      GriffinUDFAgent.register(sqlContext)
+    }
+  }
+
+  override def afterAll(): Unit = {
+    super.afterAll()
+    sparkSession.stop()
+  }
+
+  def runAndCheckResult(metrics: Map[String, Any]): Unit = {
+    val runResult = dqApp.run
+    assert(runResult.isSuccess)
+    assert(runResult.get)
+
+    // check Result Metrics
+    val dqContext = dqApp.asInstanceOf[BatchDQApp].dqContext
+    val timestamp = dqContext.contextId.timestamp
+    val expectedMetrics =
+      Map(timestamp -> metrics)
+
+    dqContext.metricWrapper.metrics should equal(expectedMetrics)
+  }
+
+  "accuracy batch job" should "work" in {
+    dqApp = initApp("/_accuracy-batch-griffindsl.json")
+    val expectedMetrics = Map("total_count" -> 50,
+      "miss_count" -> 4,
+      "matched_count" -> 46,
+      "matchedFraction" -> 0.92)
+
+    runAndCheckResult(expectedMetrics)
+  }
+
+  "completeness batch job" should "work" in {
+    dqApp = initApp("/_completeness-batch-griffindsl.json")
+    val expectedMetrics = Map("total" -> 50,
+      "incomplete" -> 1,
+      "complete" -> 49)
+
+    runAndCheckResult(expectedMetrics)
+  }
+
+  "distinctness batch job" should "work" in {
+    dqApp = initApp("/_distinctness-batch-griffindsl.json")
+
+    val expectedMetrics = Map("total" -> 50,
+      "distinct" -> 49,
+      "dup" -> Seq(Map("dup" -> 1, "num" -> 1)))
+
+    runAndCheckResult(expectedMetrics)
+  }
+
+  "profiling batch job" should "work" in {
+    dqApp = initApp("/_profiling-batch-griffindsl.json")
+    val expectedMetrics = Map(
+      "prof" -> Seq(Map("user_id" -> 10004, "cnt" -> 1),
+        Map("user_id" -> 10011, "cnt" -> 1),
+        Map("user_id" -> 10010, "cnt" -> 1),
+        Map("user_id" -> 10002, "cnt" -> 1),
+        Map("user_id" -> 10006, "cnt" -> 1),
+        Map("user_id" -> 10001, "cnt" -> 1),
+        Map("user_id" -> 10005, "cnt" -> 1),
+        Map("user_id" -> 10008, "cnt" -> 1),
+        Map("user_id" -> 10013, "cnt" -> 1),
+        Map("user_id" -> 10003, "cnt" -> 1),
+        Map("user_id" -> 10007, "cnt" -> 1),
+        Map("user_id" -> 10012, "cnt" -> 1),
+        Map("user_id" -> 10009, "cnt" -> 1)
+      ),
+      "post_group" -> Seq(Map("post_code" -> "94022", "cnt" -> 13))
+    )
+
+    runAndCheckResult(expectedMetrics)
+  }
+
+  "timeliness batch job" should "work" in {
+    dqApp = initApp("/_timeliness-batch-griffindsl.json")
+    val expectedMetrics = Map("total" -> 10,
+      "avg" -> 276000,
+      "percentile_95" -> 660000,
+      "step" -> Seq(Map("step" -> 0, "cnt" -> 6),
+        Map("step" -> 5, "cnt" -> 2),
+        Map("step" -> 3, "cnt" -> 1),
+        Map("step" -> 4, "cnt" -> 1)
+      )
+    )
+
+    runAndCheckResult(expectedMetrics)
+  }
+
+  "uniqueness batch job" should "work" in {
+    dqApp = initApp("/_uniqueness-batch-griffindsl.json")
+    val expectedMetrics = Map("total" -> 50, "unique" -> 48)
+
+    runAndCheckResult(expectedMetrics)
+  }
+}
diff --git a/measure/src/test/scala/org/apache/griffin/measure/job/DQAppTest.scala b/measure/src/test/scala/org/apache/griffin/measure/job/DQAppTest.scala
new file mode 100644
index 0000000..ce38408
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/job/DQAppTest.scala
@@ -0,0 +1,70 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.job
+
+import scala.util.{Failure, Success}
+
+import org.apache.spark.sql.SparkSession
+import org.scalatest.{FlatSpec, Matchers}
+
+import org.apache.griffin.measure.Application._
+import org.apache.griffin.measure.Loggable
+import org.apache.griffin.measure.configuration.dqdefinition._
+import org.apache.griffin.measure.configuration.enums._
+import org.apache.griffin.measure.launch.DQApp
+import org.apache.griffin.measure.launch.batch.BatchDQApp
+import org.apache.griffin.measure.launch.streaming.StreamingDQApp
+
+class DQAppTest extends FlatSpec with Matchers with Loggable {
+
+  var envParam: EnvConfig = _
+  var sparkParam: SparkParam = _
+  var sparkSession: SparkSession = _
+
+  var dqApp: DQApp = _
+
+  def getConfigFilePath(fileName: String): String = {
+    getClass.getResource(fileName).getFile
+  }
+
+  def initApp(dqParamFile: String): DQApp = {
+    val dqParam = readParamFile[DQConfig](getConfigFilePath(dqParamFile)) match {
+      case Success(p) => p
+      case Failure(ex) =>
+        error(ex.getMessage, ex)
+        sys.exit(-2)
+    }
+
+    val allParam: GriffinConfig = GriffinConfig(envParam, dqParam)
+
+    // choose process
+    val procType = ProcessType(allParam.getDqConfig.getProcType)
+    dqApp = procType match {
+      case BatchProcessType => new BatchDQApp(allParam)
+      case StreamingProcessType => StreamingDQApp(allParam)
+      case _ =>
+        error(s"${procType} is unsupported process type!")
+        sys.exit(-4)
+    }
+
+    dqApp.sparkSession = sparkSession
+    dqApp
+  }
+
+}
diff --git a/measure/src/test/scala/org/apache/griffin/measure/sink/CustomSink.scala b/measure/src/test/scala/org/apache/griffin/measure/sink/CustomSink.scala
new file mode 100644
index 0000000..01ccaba
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/sink/CustomSink.scala
@@ -0,0 +1,59 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.sink
+
+import scala.collection.mutable
+
+import org.apache.spark.rdd.RDD
+
+/**
+  * Sinks records and metrics in memory, for use in tests.
+  *
+  * @param sinkContext the sink configuration (config, metric name, timestamp, block flag)
+  */
+case class CustomSink(sinkContext: SinkContext) extends Sink {
+  val config: Map[String, Any] = sinkContext.config
+  val metricName: String = sinkContext.metricName
+  val timeStamp: Long = sinkContext.timeStamp
+  val block: Boolean = sinkContext.block
+
+  def available(): Boolean = true
+
+  def start(msg: String): Unit = {}
+
+  def finish(): Unit = {}
+
+  def log(rt: Long, msg: String): Unit = {}
+
+  val allRecords = mutable.ListBuffer[String]()
+
+  def sinkRecords(records: RDD[String], name: String): Unit = {
+    allRecords ++= records.collect()
+  }
+
+  def sinkRecords(records: Iterable[String], name: String): Unit = {
+    allRecords ++= records
+  }
+
+  val allMetrics = mutable.Map[String, Any]()
+
+  def sinkMetrics(metrics: Map[String, Any]): Unit = {
+    allMetrics ++= metrics
+  }
+}
diff --git a/measure/src/test/scala/org/apache/griffin/measure/sink/CustomSinkTest.scala b/measure/src/test/scala/org/apache/griffin/measure/sink/CustomSinkTest.scala
new file mode 100644
index 0000000..3eeb430
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/sink/CustomSinkTest.scala
@@ -0,0 +1,136 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.sink
+
+import scala.collection.mutable
+
+import org.apache.griffin.measure.configuration.dqdefinition.{RuleOutputParam, SinkParam}
+import org.apache.griffin.measure.configuration.enums.FlattenType
+import org.apache.griffin.measure.step.write.{MetricFlushStep, MetricWriteStep, RecordWriteStep}
+
+class CustomSinkTest extends SinkTestBase {
+
+  var sinkParam = SinkParam("custom",
+    Map("class" -> "org.apache.griffin.measure.sink.CustomSink"))
+  var sinkParams = Seq(sinkParam)
+
+  def withCustomSink[A](func: (MultiSinks) => A): A = {
+    val sinkFactory = SinkFactory(sinkParams, "Test Sink Factory")
+    val timestamp = System.currentTimeMillis
+    val sinks = sinkFactory.getSinks(timestamp, true)
+    func(sinks)
+  }
+
+  "custom sink" can "sink metrics" in {
+    val actualMetrics = withCustomSink((sinks) => {
+      sinks.sinkMetrics(Map("sum" -> 10))
+      sinks.sinkMetrics(Map("count" -> 5))
+      sinks.headSinkOpt match {
+        case Some(sink: CustomSink) => sink.allMetrics
+        case _ => mutable.ListBuffer[String]()
+      }
+    })
+
+    val expected = Map("sum" -> 10, "count" -> 5)
+    actualMetrics should be(expected)
+  }
+
+  "custom sink" can "sink records" in {
+    val actualRecords = withCustomSink((sinks) => {
+      val rdd1 = createDataFrame(1 to 2)
+      sinks.sinkRecords(rdd1.toJSON.rdd, "test records")
+      val rdd2 = createDataFrame(2 to 4)
+      sinks.sinkRecords(rdd2.toJSON.rdd, "test records")
+      sinks.headSinkOpt match {
+        case Some(sink: CustomSink) => sink.allRecords
+        case _ =>
+      }
+    })
+
+    val expected = List(
+      "{\"id\":1,\"name\":\"name_1\",\"sex\":\"women\",\"age\":16}",
+      "{\"id\":2,\"name\":\"name_2\",\"sex\":\"man\",\"age\":17}",
+      "{\"id\":2,\"name\":\"name_2\",\"sex\":\"man\",\"age\":17}",
+      "{\"id\":3,\"name\":\"name_3\",\"sex\":\"women\",\"age\":18}",
+      "{\"id\":4,\"name\":\"name_4\",\"sex\":\"man\",\"age\":19}")
+
+    actualRecords should be(expected)
+  }
+
+  "RecordWriteStep" should "work with custom sink" in {
+    val resultTable = "result_table"
+    val df = createDataFrame(1 to 5)
+    df.createOrReplaceTempView(resultTable)
+
+    val rwName = Some(metricsDefaultOutput).flatMap(_.getNameOpt).getOrElse(resultTable)
+    val dQContext = getDqContext()
+    RecordWriteStep(rwName, resultTable).execute(dQContext)
+
+    val actualRecords = dQContext.getSink().asInstanceOf[MultiSinks].headSinkOpt match {
+      case Some(sink: CustomSink) => sink.allRecords
+      case _ => mutable.ListBuffer[String]()
+    }
+
+    val expected = List(
+      "{\"id\":1,\"name\":\"name_1\",\"sex\":\"women\",\"age\":16}",
+      "{\"id\":2,\"name\":\"name_2\",\"sex\":\"man\",\"age\":17}",
+      "{\"id\":3,\"name\":\"name_3\",\"sex\":\"women\",\"age\":18}",
+      "{\"id\":4,\"name\":\"name_4\",\"sex\":\"man\",\"age\":19}",
+      "{\"id\":5,\"name\":\"name_5\",\"sex\":\"women\",\"age\":20}")
+
+    actualRecords should be(expected)
+  }
+
+  val metricsDefaultOutput = RuleOutputParam("metrics", "default_output", "default")
+  val metricsEntriesOutput = RuleOutputParam("metrics", "entries_output", "entries")
+  val metricsArrayOutput = RuleOutputParam("metrics", "array_output", "array")
+  val metricsMapOutput = RuleOutputParam("metrics", "map_output", "map")
+
+  "MetricWriteStep" should "output default metrics with custom sink" in {
+    val resultTable = "result_table"
+    val df = createDataFrame(1 to 5)
+    df.groupBy("sex")
+      .agg("age" -> "max", "age" -> "avg")
+      .createOrReplaceTempView(resultTable)
+
+    val dQContext = getDqContext()
+
+    val metricWriteStep = {
+      val metricOpt = Some(metricsDefaultOutput)
+      val mwName = metricOpt.flatMap(_.getNameOpt).getOrElse("default_metrics_name")
+      val flattenType = metricOpt.map(_.getFlatten).getOrElse(FlattenType.default)
+      MetricWriteStep(mwName, resultTable, flattenType)
+    }
+
+    metricWriteStep.execute(dQContext)
+    MetricFlushStep().execute(dQContext)
+    val actualMetrics = dQContext.getSink().asInstanceOf[MultiSinks].headSinkOpt match {
+      case Some(sink: CustomSink) => sink.allMetrics
+      case _ => mutable.Map[String, Any]()
+    }
+
+    val metricsValue = Seq(Map("sex" -> "man", "max(age)" -> 19, "avg(age)" -> 18.0),
+      Map("sex" -> "women", "max(age)" -> 20, "avg(age)" -> 18.0))
+
+    val expected = Map("default_output" -> metricsValue)
+
+    actualMetrics.get("value").get should be(expected)
+  }
+
+}
diff --git a/measure/src/test/scala/org/apache/griffin/measure/sink/SinkTestBase.scala b/measure/src/test/scala/org/apache/griffin/measure/sink/SinkTestBase.scala
new file mode 100644
index 0000000..a88f1ee
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/sink/SinkTestBase.scala
@@ -0,0 +1,61 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.sink
+
+import com.holdenkarau.spark.testing.DataFrameSuiteBase
+import org.apache.spark.sql.DataFrame
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.types._
+import org.scalatest.FlatSpec
+import org.scalatest.Matchers
+
+import org.apache.griffin.measure.Loggable
+import org.apache.griffin.measure.configuration.dqdefinition.SinkParam
+import org.apache.griffin.measure.configuration.enums.BatchProcessType
+import org.apache.griffin.measure.context.{ContextId, DQContext}
+
+trait SinkTestBase extends FlatSpec with Matchers with DataFrameSuiteBase with Loggable {
+
+  var sinkParams: Seq[SinkParam]
+
+  def getDqContext(name: String = "test-context"): DQContext = {
+    DQContext(
+      ContextId(System.currentTimeMillis),
+      name,
+      Nil,
+      sinkParams,
+      BatchProcessType
+    )(spark)
+  }
+
+
+  def createDataFrame(arr: Seq[Int]): DataFrame = {
+    val schema = StructType(Array(
+      StructField("id", LongType),
+      StructField("name", StringType),
+      StructField("sex", StringType),
+      StructField("age", IntegerType)
+    ))
+    val rows = arr.map { i =>
+      Row(i.toLong, s"name_$i", if (i % 2 == 0) "man" else "women", i + 15)
+    }
+    val rowRdd = sqlContext.sparkContext.parallelize(rows)
+    sqlContext.createDataFrame(rowRdd, schema)
+  }
+}
diff --git a/measure/src/test/scala/org/apache/griffin/measure/step/TransformStepTest.scala b/measure/src/test/scala/org/apache/griffin/measure/step/TransformStepTest.scala
new file mode 100644
index 0000000..5314669
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/step/TransformStepTest.scala
@@ -0,0 +1,90 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.step
+
+import com.holdenkarau.spark.testing.DataFrameSuiteBase
+import org.scalatest._
+
+import org.apache.griffin.measure.Loggable
+import org.apache.griffin.measure.configuration.enums.BatchProcessType
+import org.apache.griffin.measure.context.ContextId
+import org.apache.griffin.measure.context.DQContext
+import org.apache.griffin.measure.step.transform.TransformStep
+
+class TransformStepTest extends FlatSpec with Matchers with DataFrameSuiteBase with Loggable {
+
+  case class DualTransformStep(name: String,
+                               duration: Int,
+                               rule: String = "",
+                               details: Map[String, Any] = Map(),
+                               cache: Boolean = false
+                              ) extends TransformStep {
+
+    def doExecute(context: DQContext): Boolean = {
+      val threadName = Thread.currentThread().getName
+      info(s"Step $name started with $threadName")
+      Thread.sleep(duration * 1000L)
+      info(s"Step $name finished with $threadName")
+      true
+    }
+  }
+
+  private def getDqContext(name: String = "test-context"): DQContext = {
+    DQContext(
+      ContextId(System.currentTimeMillis),
+      name,
+      Nil,
+      Nil,
+      BatchProcessType
+    )(spark)
+  }
+
+  /**
+    * Run transform steps in parallel. Here are the dependencies of transform steps
+    *
+    * step5
+    * |   |---step2
+    * |   |   |---step1
+    * |   |---step3
+    * |   |   |---step1
+    * |   |---step4
+    *
+    * step1 : -->
+    * step2 :    --->
+    * step3 :    ---->
+    * step4 : ->
+    * step5 :         -->
+    *
+    */
+  "transform step " should "be run steps in parallel" in {
+    val step1 = DualTransformStep("step1", 3)
+    val step2 = DualTransformStep("step2", 4)
+    step2.parentSteps += step1
+    val step3 = DualTransformStep("step3", 5)
+    step3.parentSteps += step1
+    val step4 = DualTransformStep("step4", 2)
+    val step5 = DualTransformStep("step5", 3)
+    step5.parentSteps += step2
+    step5.parentSteps += step3
+    step5.parentSteps += step4
+
+    val context = getDqContext()
+    step5.execute(context) should be (true)
+  }
+}
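With the durations above, the parallel run should finish in roughly the length of the longest dependency path, step1 (3s) + step3 (5s) + step5 (3s) = 11s (plus up to a second of status polling), rather than the 3 + 4 + 5 + 2 + 3 = 17s a purely sequential execution would take; step2 and step4 overlap with that path.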
diff --git a/merge_pr.py b/merge_pr.py
index 88d5358..4577df6 100755
--- a/merge_pr.py
+++ b/merge_pr.py
@@ -64,7 +64,7 @@
 BRANCH_PREFIX = "PR_TOOL"
 
 PR_REPO = "https://github.com/apache/griffin.git"
-PUSH_REPO = "https://git-wip-us.apache.org/repos/asf/griffin.git"
+PUSH_REPO = "https://gitbox.apache.org/repos/asf/griffin.git"
 
 
 def get_json(url):
diff --git a/pom.xml b/pom.xml
index 983658a..310d1c5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -30,7 +30,7 @@
 
     <groupId>org.apache.griffin</groupId>
     <artifactId>griffin</artifactId>
-    <version>0.4.0-SNAPSHOT</version>
+    <version>0.6.0-SNAPSHOT</version>
     <packaging>pom</packaging>
     <name>Apache Griffin ${project.version}</name>
     <url>http://griffin.apache.org</url>
@@ -75,13 +75,10 @@
         </mailingList>
     </mailingLists>
 
-    <!--<pluginRepositories>-->
-    <!--</pluginRepositories>-->
-
     <scm>
-        <connection>scm:git:https://git-wip-us.apache.org/repos/asf/griffin.git</connection>
-        <developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/griffin.git</developerConnection>
-        <url>https://git-wip-us.apache.org/repos/asf?p=griffin.git;a=summary</url>
+        <connection>scm:git:https://gitbox.apache.org/repos/asf/griffin.git</connection>
+        <developerConnection>scm:git:https://gitbox.apache.org/repos/asf/griffin.git</developerConnection>
+        <url>https://gitbox.apache.org/repos/asf?p=griffin.git;a=summary</url>
         <tag>HEAD</tag>
     </scm>
 
@@ -95,6 +92,16 @@
         <module>measure</module>
     </modules>
 
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>junit</groupId>
+                <artifactId>junit</artifactId>
+                <version>4.12</version>
+                <scope>test</scope>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
 
     <build>
         <pluginManagement>
@@ -167,13 +174,7 @@
                 </plugin>
             </plugins>
         </pluginManagement>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-            </plugin>
 
-        </plugins>
     </build>
 
-</project>
+</project>
\ No newline at end of file
diff --git a/service/pom.xml b/service/pom.xml
index 5a30833..3c4ed80 100644
--- a/service/pom.xml
+++ b/service/pom.xml
@@ -23,7 +23,7 @@
     <parent>
         <groupId>org.apache.griffin</groupId>
         <artifactId>griffin</artifactId>
-        <version>0.4.0-SNAPSHOT</version>
+        <version>0.6.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>service</artifactId>
@@ -32,14 +32,13 @@
     <name>Apache Griffin :: Web Service</name>
 
     <properties>
-        <java.version>1.8</java.version>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <hadoop.version>2.7.1</hadoop.version>
         <hive.version>2.2.0</hive.version>
         <scala.version>2.10</scala.version>
         <spring.boot.version>1.5.1.RELEASE</spring.boot.version>
+        <spring.security.kerberos.version>1.0.0.RELEASE</spring.security.kerberos.version>
         <confluent.version>3.2.0</confluent.version>
-        <quartz.version>2.2.1</quartz.version>
+        <quartz.version>2.2.2</quartz.version>
         <start-class>org.apache.griffin.core.GriffinWebApplication</start-class>
         <powermock.version>1.6.6</powermock.version>
         <mockito.version>1.10.19</mockito.version>
@@ -48,6 +47,9 @@
         <eclipselink.version>2.6.0</eclipselink.version>
         <mysql.java.version>5.1.47</mysql.java.version>
         <postgresql.version>9.4.1212.jre7</postgresql.version>
+        <livy.core.version>0.3.0</livy.core.version>
+        <elasticsearch-rest-client.version>6.2.4</elasticsearch-rest-client.version>
+        <jackson-databind.version>2.6.3</jackson-databind.version>
     </properties>
 
     <repositories>
@@ -57,7 +59,6 @@
         </repository>
     </repositories>
 
-
     <dependencyManagement>
         <dependencies>
             <dependency>
@@ -83,6 +84,10 @@
         </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-log4j2</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-data-jpa</artifactId>
             <exclusions>
                 <exclusion>
@@ -99,6 +104,11 @@
             <groupId>org.springframework</groupId>
             <artifactId>spring-aspects</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.springframework.security.kerberos</groupId>
+            <artifactId>spring-security-kerberos-client</artifactId>
+            <version>${spring.security.kerberos.version}</version>
+        </dependency>
         <!--eclipse link-->
         <dependency>
             <groupId>org.eclipse.persistence</groupId>
@@ -111,9 +121,9 @@
             <version>${postgresql.version}</version>
         </dependency>
         <!--<dependency>-->
-            <!--<groupId>mysql</groupId>-->
-            <!--<artifactId>mysql-connector-java</artifactId>-->
-            <!--<version>${mysql.java.version}</version>-->
+        <!--<groupId>mysql</groupId>-->
+        <!--<artifactId>mysql-connector-java</artifactId>-->
+        <!--<version>${mysql.java.version}</version>-->
         <!--</dependency>-->
         <dependency>
             <groupId>com.h2database</groupId>
@@ -127,8 +137,9 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.6.3</version>
+            <version>${jackson-databind.version}</version>
         </dependency>
+
         <!-- to access metastore from hive-->
         <dependency>
             <groupId>org.apache.hadoop</groupId>
@@ -140,6 +151,10 @@
                     <groupId>javax.servlet</groupId>
                     <artifactId>servlet-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
         <dependency>
@@ -166,11 +181,46 @@
             </exclusions>
         </dependency>
 
+        <!-- to access Hive using JDBC -->
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-jdbc</artifactId>
+            <version>${hive.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.eclipse.jetty.aggregate</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.eclipse.jetty.orbit</groupId>
+                    <artifactId>javax.servlet</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>javax.servlet</groupId>
+                    <artifactId>servlet-api</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.mortbay.jetty</groupId>
+                    <artifactId>servlet-api-2.5</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
         <!-- to access confluent schema registry -->
         <dependency>
             <groupId>io.confluent</groupId>
             <artifactId>kafka-schema-registry-client</artifactId>
             <version>${confluent.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
 
         <!--schedule-->
@@ -181,19 +231,19 @@
         <dependency>
             <groupId>org.quartz-scheduler</groupId>
             <artifactId>quartz</artifactId>
-            <version>2.2.2</version>
+            <version>${quartz.version}</version>
         </dependency>
         <dependency>
             <groupId>org.quartz-scheduler</groupId>
             <artifactId>quartz-jobs</artifactId>
-            <version>2.2.2</version>
+            <version>${quartz.version}</version>
         </dependency>
 
         <!--livy-core-->
         <dependency>
             <groupId>com.cloudera.livy</groupId>
-            <artifactId>livy-core_2.10</artifactId>
-            <version>0.3.0</version>
+            <artifactId>livy-core_${scala.version}</artifactId>
+            <version>${livy.core.version}</version>
         </dependency>
 
         <!-- test -->
@@ -206,7 +256,6 @@
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
-            <scope>test</scope>
         </dependency>
 
         <dependency>
@@ -225,18 +274,18 @@
         <dependency>
             <groupId>org.elasticsearch.client</groupId>
             <artifactId>elasticsearch-rest-client</artifactId>
-            <version>6.2.4</version>
+            <version>${elasticsearch-rest-client.version}</version>
         </dependency>
     </dependencies>
     <profiles>
         <!--if you need mysql, please uncomment mysql-connector-java -->
         <!--<profile>-->
-            <!--<id>mysql</id>-->
-            <!--<activation>-->
-                <!--<property>-->
-                    <!--<name>mysql</name>-->
-                <!--</property>-->
-            <!--</activation>-->
+        <!--<id>mysql</id>-->
+        <!--<activation>-->
+        <!--<property>-->
+        <!--<name>mysql</name>-->
+        <!--</property>-->
+        <!--</activation>-->
         <!--</profile>-->
         <profile>
             <id>dev</id>
@@ -279,6 +328,22 @@
                 </dependencies>
             </plugin>
             <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <version>3.1.1</version>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>jar</goal>
+                        </goals>
+                        <configuration>
+                            <classifier>lib</classifier>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
                 <groupId>org.springframework.boot</groupId>
                 <artifactId>spring-boot-maven-plugin</artifactId>
                 <version>${spring-boot-maven-plugin.version}</version>
@@ -298,13 +363,7 @@
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-compiler-plugin</artifactId>
-                <version>3.6.1</version>
-                <configuration>
-                    <source>1.8</source>
-                    <target>1.8</target>
-                </configuration>
-
             </plugin>
         </plugins>
     </build>
-</project>
+</project>
\ No newline at end of file
diff --git a/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java b/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
index 4ed4773..62bdc1f 100644
--- a/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
+++ b/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
@@ -18,7 +18,6 @@
 */
 package org.apache.griffin.core;
 
-
 import org.apache.griffin.core.common.SimpleCORSFilter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -27,16 +26,15 @@
 import org.springframework.context.annotation.Bean;
 import org.springframework.scheduling.annotation.EnableScheduling;
 
-
 @SpringBootApplication
 @EnableScheduling
 public class GriffinWebApplication {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(GriffinWebApplication.class);
+        .getLogger(GriffinWebApplication.class);
 
     public static void main(String[] args) {
-        LOGGER.info("application start");
         SpringApplication.run(GriffinWebApplication.class, args);
+        LOGGER.info("application started");
     }
 
     @Bean
diff --git a/service/src/main/java/org/apache/griffin/core/common/SimpleCORSFilter.java b/service/src/main/java/org/apache/griffin/core/common/SimpleCORSFilter.java
index bd39188..6c8e8c3 100644
--- a/service/src/main/java/org/apache/griffin/core/common/SimpleCORSFilter.java
+++ b/service/src/main/java/org/apache/griffin/core/common/SimpleCORSFilter.java
@@ -32,14 +32,14 @@
     public void doFilter(final ServletRequest req,
                          final ServletResponse res,
                          final FilterChain chain)
-            throws IOException, ServletException {
+        throws IOException, ServletException {
         HttpServletResponse response = (HttpServletResponse) res;
         response.setHeader("Access-Control-Allow-Origin", "*");
         response.setHeader("Access-Control-Allow-Methods",
-                "POST, GET, OPTIONS, DELETE,PUT");
+            "POST, GET, OPTIONS, DELETE,PUT");
         response.setHeader("Access-Control-Max-Age", "3600");
         response.setHeader("Access-Control-Allow-Headers",
-                "X-PINGOTHER, Origin, X-Requested-With, Content-Type, Accept");
+            "X-PINGOTHER, Origin, X-Requested-With, Content-Type, Accept");
         chain.doFilter(req, res);
     }
 
diff --git a/service/src/main/java/org/apache/griffin/core/config/EclipseLinkJpaConfig.java b/service/src/main/java/org/apache/griffin/core/config/EclipseLinkJpaConfig.java
index 1493569..1463d54 100644
--- a/service/src/main/java/org/apache/griffin/core/config/EclipseLinkJpaConfig.java
+++ b/service/src/main/java/org/apache/griffin/core/config/EclipseLinkJpaConfig.java
@@ -37,9 +37,9 @@
 @ComponentScan("org.apache.griffin.core")
 public class EclipseLinkJpaConfig extends JpaBaseConfiguration {
     protected EclipseLinkJpaConfig(
-            DataSource ds, JpaProperties properties,
-            ObjectProvider<JtaTransactionManager> jtm,
-            ObjectProvider<TransactionManagerCustomizers> tmc) {
+        DataSource ds, JpaProperties properties,
+        ObjectProvider<JtaTransactionManager> jtm,
+        ObjectProvider<TransactionManagerCustomizers> tmc) {
         super(ds, properties, jtm, tmc);
     }
 
@@ -53,7 +53,7 @@
         Map<String, Object> map = new HashMap<>();
         map.put(PersistenceUnitProperties.WEAVING, "false");
         map.put(PersistenceUnitProperties.DDL_GENERATION,
-                "create-or-extend-tables");
+            "create-or-extend-tables");
         return map;
     }
 }
diff --git a/service/src/main/java/org/apache/griffin/core/config/EnvConfig.java b/service/src/main/java/org/apache/griffin/core/config/EnvConfig.java
index 8c075a4..ef303a1 100644
--- a/service/src/main/java/org/apache/griffin/core/config/EnvConfig.java
+++ b/service/src/main/java/org/apache/griffin/core/config/EnvConfig.java
@@ -35,7 +35,7 @@
 
 public class EnvConfig {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(EnvConfig.class);
+        .getLogger(EnvConfig.class);
     public static String ENV_BATCH;
     public static String ENV_STREAMING;
 
@@ -47,7 +47,7 @@
      * @throws IOException io exception
      */
     private static String readEnvFromResource(String path)
-            throws IOException {
+        throws IOException {
         if (path == null) {
             LOGGER.warn("Parameter path is null.");
             return null;
@@ -75,7 +75,7 @@
      * @throws IOException io exception
      */
     private static String readEnvFromAbsolutePath(String path)
-            throws IOException {
+        throws IOException {
         if (path == null) {
             LOGGER.warn("Parameter path is null.");
             return null;
@@ -103,7 +103,7 @@
      * @throws IOException io exception
      */
     static String getBatchEnv(String name, String defaultPath, String location)
-            throws IOException {
+        throws IOException {
         if (ENV_BATCH != null) {
             return ENV_BATCH;
         }
@@ -121,7 +121,7 @@
     static String getStreamingEnv(String name,
                                   String defaultPath,
                                   String location)
-            throws IOException {
+        throws IOException {
         if (ENV_STREAMING != null) {
             return ENV_STREAMING;
         }
diff --git a/service/src/main/java/org/apache/griffin/core/config/LoginConfig.java b/service/src/main/java/org/apache/griffin/core/config/LoginConfig.java
index 8c9e4a2..c362755 100644
--- a/service/src/main/java/org/apache/griffin/core/config/LoginConfig.java
+++ b/service/src/main/java/org/apache/griffin/core/config/LoginConfig.java
@@ -53,7 +53,7 @@
                 return new LoginServiceDefaultImpl();
             case "ldap":
                 return new LoginServiceLdapImpl(url, email, searchBase,
-                        searchPattern, sslSkipVerify, bindDN, bindPassword);
+                    searchPattern, sslSkipVerify, bindDN, bindPassword);
             default:
                 return null;
         }
diff --git a/service/src/main/java/org/apache/griffin/core/config/PropertiesConfig.java b/service/src/main/java/org/apache/griffin/core/config/PropertiesConfig.java
index ebbd6cf..53c0dfc 100644
--- a/service/src/main/java/org/apache/griffin/core/config/PropertiesConfig.java
+++ b/service/src/main/java/org/apache/griffin/core/config/PropertiesConfig.java
@@ -42,11 +42,18 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.core.io.ClassPathResource;
 
+/**
+ * PropertiesConfig is responsible for initializing configuration objects
+ * from property files.
+ *
+ * @see EnvConfig
+ * @see org.apache.griffin.core.util.PropertiesUtil
+ */
 @Configuration
 public class PropertiesConfig {
 
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(PropertiesConfig.class);
+        .getLogger(PropertiesConfig.class);
 
     public static Map<String, Object> livyConfMap;
 
@@ -55,12 +62,12 @@
     private String envLocation;
 
     public PropertiesConfig(
-            @Value("${external.config.location}") String configLocation,
-            @Value("${external.env.location}") String envLocation) {
+        @Value("${external.config.location}") String configLocation,
+        @Value("${external.env.location}") String envLocation) {
         LOGGER.info("external.config.location : {}",
-                configLocation != null ? configLocation : "null");
+            configLocation != null ? configLocation : "null");
         LOGGER.info("external.env.location : {}",
-                envLocation != null ? envLocation : "null");
+            envLocation != null ? envLocation : "null");
         this.configLocation = configLocation;
         this.envLocation = envLocation;
     }
@@ -93,9 +100,9 @@
     }
 
     private static void genLivyConf(
-            String name,
-            String defaultPath,
-            String location) throws IOException {
+        String name,
+        String defaultPath,
+        String location) throws IOException {
         if (livyConfMap != null) {
             return;
         }
@@ -117,7 +124,7 @@
      * @throws IOException io exception
      */
     private static Map<String, Object> readPropertiesFromResource(String path)
-            throws IOException {
+        throws IOException {
         if (path == null) {
             LOGGER.warn("Parameter path is null.");
             return null;
diff --git a/service/src/main/java/org/apache/griffin/core/config/SchedulerConfig.java b/service/src/main/java/org/apache/griffin/core/config/SchedulerConfig.java
index 7b6af51..843e321 100644
--- a/service/src/main/java/org/apache/griffin/core/config/SchedulerConfig.java
+++ b/service/src/main/java/org/apache/griffin/core/config/SchedulerConfig.java
@@ -44,7 +44,7 @@
     @Bean
     public JobFactory jobFactory(ApplicationContext applicationContext) {
         AutowiringSpringBeanJobFactory jobFactory =
-                new AutowiringSpringBeanJobFactory();
+            new AutowiringSpringBeanJobFactory();
         jobFactory.setApplicationContext(applicationContext);
         return jobFactory;
     }
@@ -60,5 +60,4 @@
         return factory;
     }
 
-
 }
diff --git a/service/src/main/java/org/apache/griffin/core/event/GriffinEventManager.java b/service/src/main/java/org/apache/griffin/core/event/GriffinEventManager.java
index 996d7a7..e0dee89 100644
--- a/service/src/main/java/org/apache/griffin/core/event/GriffinEventManager.java
+++ b/service/src/main/java/org/apache/griffin/core/event/GriffinEventManager.java
@@ -19,16 +19,16 @@
 
 package org.apache.griffin.core.event;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import javax.annotation.PostConstruct;
+
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.ApplicationContext;
 import org.springframework.stereotype.Component;
 
-import javax.annotation.PostConstruct;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-
 @Component
 public class GriffinEventManager {
     @Autowired
@@ -43,11 +43,11 @@
     void initializeListeners() {
         List<GriffinHook> eventListeners = new ArrayList<>();
         applicationContext.getBeansOfType(GriffinHook.class)
-                .forEach((beanName, listener) -> {
-                    if (enabledListeners.contains(beanName)) {
-                        eventListeners.add(listener);
-                    }
-                });
+            .forEach((beanName, listener) -> {
+                if (enabledListeners.contains(beanName)) {
+                    eventListeners.add(listener);
+                }
+            });
         this.eventListeners = eventListeners;
     }
 
diff --git a/service/src/main/java/org/apache/griffin/core/event/JobEvent.java b/service/src/main/java/org/apache/griffin/core/event/JobEvent.java
index 5fdea0f..467cf28 100644
--- a/service/src/main/java/org/apache/griffin/core/event/JobEvent.java
+++ b/service/src/main/java/org/apache/griffin/core/event/JobEvent.java
@@ -32,29 +32,29 @@
 
     public static JobEvent yieldJobEventBeforeCreation(AbstractJob source) {
         return new JobEvent(source,
-                EventType.CREATION_EVENT,
-                EventSourceType.JOB,
-                EventPointcutType.BEFORE);
+            EventType.CREATION_EVENT,
+            EventSourceType.JOB,
+            EventPointcutType.BEFORE);
     }
 
     public static JobEvent yieldJobEventAfterCreation(AbstractJob source) {
         return new JobEvent(source,
-                EventType.CREATION_EVENT,
-                EventSourceType.JOB,
-                EventPointcutType.AFTER);
+            EventType.CREATION_EVENT,
+            EventSourceType.JOB,
+            EventPointcutType.AFTER);
     }
 
     public static JobEvent yieldJobEventBeforeRemoval(AbstractJob source) {
         return new JobEvent(source,
-                EventType.REMOVAL_EVENT,
-                EventSourceType.JOB,
-                EventPointcutType.BEFORE);
+            EventType.REMOVAL_EVENT,
+            EventSourceType.JOB,
+            EventPointcutType.BEFORE);
     }
 
     public static JobEvent yieldJobEventAfterRemoval(AbstractJob source) {
         return new JobEvent(source,
-                EventType.REMOVAL_EVENT,
-                EventSourceType.JOB,
-                EventPointcutType.AFTER);
+            EventType.REMOVAL_EVENT,
+            EventSourceType.JOB,
+            EventPointcutType.AFTER);
     }
 }
diff --git a/service/src/main/java/org/apache/griffin/core/exception/GriffinExceptionHandler.java b/service/src/main/java/org/apache/griffin/core/exception/GriffinExceptionHandler.java
index d987da7..5fb88fb 100644
--- a/service/src/main/java/org/apache/griffin/core/exception/GriffinExceptionHandler.java
+++ b/service/src/main/java/org/apache/griffin/core/exception/GriffinExceptionHandler.java
@@ -34,28 +34,28 @@
     @SuppressWarnings("rawtypes")
     @ExceptionHandler(GriffinException.ServiceException.class)
     public ResponseEntity handleGriffinExceptionOfServer(
-            HttpServletRequest request,
-            GriffinException.ServiceException e) {
+        HttpServletRequest request,
+        GriffinException.ServiceException e) {
         String message = e.getMessage();
         Throwable cause = e.getCause();
         GriffinExceptionResponse body = new GriffinExceptionResponse(
-                HttpStatus.INTERNAL_SERVER_ERROR,
-                message, request.getRequestURI(), cause.getClass().getName());
+            HttpStatus.INTERNAL_SERVER_ERROR,
+            message, request.getRequestURI(), cause.getClass().getName());
         return new ResponseEntity<>(body, HttpStatus.INTERNAL_SERVER_ERROR);
     }
 
     @SuppressWarnings("rawtypes")
     @ExceptionHandler(GriffinException.class)
     public ResponseEntity handleGriffinExceptionOfClient(
-            HttpServletRequest request, GriffinException e) {
+        HttpServletRequest request, GriffinException e) {
         ResponseStatus responseStatus = AnnotationUtils.findAnnotation(
-                e.getClass(), ResponseStatus.class);
+            e.getClass(), ResponseStatus.class);
         HttpStatus status = responseStatus.code();
         String code = e.getMessage();
         GriffinExceptionMessage message = GriffinExceptionMessage
-                .valueOf(Integer.valueOf(code));
+            .valueOf(Integer.valueOf(code));
         GriffinExceptionResponse body = new GriffinExceptionResponse(
-                status, message, request.getRequestURI());
+            status, message, request.getRequestURI());
         return new ResponseEntity<>(body, status);
     }
 }
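
The handler above works in two steps: it reads the HTTP status from the @ResponseStatus annotation on the concrete GriffinException subclass, then converts the exception's numeric message code back into a GriffinExceptionMessage for the response body. A minimal sketch of service code raising such an error, following the same pattern BatchJobOperatorImpl uses later in this diff (the surrounding class here is illustrative only):

    // Illustrative only: throwing a coded GriffinException that
    // handleGriffinExceptionOfClient turns into a 400 response whose body
    // carries the mapped GriffinExceptionMessage.
    import org.apache.griffin.core.exception.GriffinException;
    import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_CONNECTOR_NAME;

    public class ConnectorNameCheck {
        public void check(boolean connectorNameIsKnown) {
            if (!connectorNameIsKnown) {
                // BadRequestException is expected to carry a @ResponseStatus(BAD_REQUEST)
                // annotation, which is what the handler's findAnnotation call resolves.
                throw new GriffinException.BadRequestException(INVALID_CONNECTOR_NAME);
            }
        }
    }
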
diff --git a/service/src/main/java/org/apache/griffin/core/exception/GriffinExceptionMessage.java b/service/src/main/java/org/apache/griffin/core/exception/GriffinExceptionMessage.java
index ae6a0ea..5a39c66 100644
--- a/service/src/main/java/org/apache/griffin/core/exception/GriffinExceptionMessage.java
+++ b/service/src/main/java/org/apache/griffin/core/exception/GriffinExceptionMessage.java
@@ -23,9 +23,9 @@
 
     //400, "Bad Request"
     MEASURE_TYPE_DOES_NOT_MATCH(40001, "Property 'measure.type' does not match"
-            + "the type of measure in request body"),
+        + "the type of measure in request body"),
     INVALID_CONNECTOR_NAME(40002, "Property 'name' in 'connectors' " +
-            "field is invalid"),
+        "field is invalid"),
     MISSING_METRIC_NAME(40003, "Missing property 'metricName'"),
     INVALID_JOB_NAME(40004, "Property 'job.name' is invalid"),
     MISSING_BASELINE_CONFIG(40005, "Missing 'as.baseline' config in 'data.segments'"),
@@ -45,15 +45,17 @@
     JOB_TYPE_DOES_NOT_SUPPORT(40011, "We don't support such job type."),
 
     STREAMING_JOB_IS_RUNNING(40012, "There is no need to start again " +
-            "as job is RUNNING."),
+        "as job is RUNNING."),
     STREAMING_JOB_IS_STOPPED(40012, "There is no need to stop again " +
-            "as job is STOPPED."),
+        "as job is STOPPED."),
     JOB_IS_NOT_SCHEDULED(40013, "The job isn't scheduled."),
 
     JOB_IS_NOT_IN_PAUSED_STATUS(40014, "The job isn't in paused status."),
 
     JOB_IS_IN_PAUSED_STATUS(40015, "The job is already in paused status."),
 
+    INVALID_MEASURE_PREDICATE(40016, "The measure predicate is invalid"),
+
     //404, "Not Found"
     MEASURE_ID_DOES_NOT_EXIST(40401, "Measure id does not exist"),
 
@@ -64,12 +66,18 @@
     NO_SUCH_JOB_ACTION(40404, "No such job action"),
 
     JOB_KEY_DOES_NOT_EXIST(40405, "Job key which consists of " +
-            "group and name does not exist."),
+        "group and name does not exist."),
     ORGANIZATION_NAME_DOES_NOT_EXIST(40406, "Organization name " +
-            "does not exist"),
+        "does not exist"),
 
     HDFS_FILE_NOT_EXIST(40407, "Hadoop data file not exist"),
 
+    PREDICATE_TYPE_NOT_FOUND(40408, "Unknown predicate type"),
+
+    INSTANCE_ID_DOES_NOT_EXIST(40409, "Instance id does not exist"),
+
+    JOB_INSTANCE_NOT_FOUND(40410, "No job instances with given job instance id found"),
+
     //409, "Conflict"
     MEASURE_NAME_ALREADY_EXIST(40901, "Measure name already exists"),
 
@@ -93,7 +101,7 @@
             }
         }
         throw new IllegalArgumentException("No matching constant for ["
-                + code + "]");
+            + code + "]");
     }
 
 
diff --git a/service/src/main/java/org/apache/griffin/core/info/GriffinInfoController.java b/service/src/main/java/org/apache/griffin/core/info/GriffinInfoController.java
index 4280095..f4b449c 100644
--- a/service/src/main/java/org/apache/griffin/core/info/GriffinInfoController.java
+++ b/service/src/main/java/org/apache/griffin/core/info/GriffinInfoController.java
@@ -29,6 +29,6 @@
 
     @RequestMapping(value = "/version", method = RequestMethod.GET)
     public String greeting() {
-        return "0.4.0";
+        return "0.5.0";
     }
 }
diff --git a/service/src/main/java/org/apache/griffin/core/job/BatchJobOperatorImpl.java b/service/src/main/java/org/apache/griffin/core/job/BatchJobOperatorImpl.java
index 0210822..cdaa68c 100644
--- a/service/src/main/java/org/apache/griffin/core/job/BatchJobOperatorImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/job/BatchJobOperatorImpl.java
@@ -71,7 +71,7 @@
 @Service
 public class BatchJobOperatorImpl implements JobOperator {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(BatchJobOperatorImpl.class);
+        .getLogger(BatchJobOperatorImpl.class);
 
     @Autowired
     private SchedulerFactoryBean factory;
@@ -85,7 +85,7 @@
     @Override
     @Transactional(rollbackFor = Exception.class)
     public AbstractJob add(AbstractJob job, GriffinMeasure measure)
-            throws Exception {
+        throws Exception {
         validateParams(job, measure);
         String qName = jobService.getQuartzName(job);
         String qGroup = jobService.getQuartzGroup();
@@ -120,20 +120,20 @@
         TriggerState state = getTriggerState(name, group);
         if (state == null) {
             throw new GriffinException.BadRequestException(
-                    JOB_IS_NOT_SCHEDULED);
+                JOB_IS_NOT_SCHEDULED);
         }
         /* If job is not in paused state,we can't start it
         as it may be RUNNING.*/
         if (state != PAUSED) {
             throw new GriffinException.BadRequestException
-                    (JOB_IS_NOT_IN_PAUSED_STATUS);
+                (JOB_IS_NOT_IN_PAUSED_STATUS);
         }
         JobKey jobKey = jobKey(name, group);
         try {
             factory.getScheduler().resumeJob(jobKey);
         } catch (SchedulerException e) {
             throw new GriffinException.ServiceException(
-                    "Failed to start job.", e);
+                "Failed to start job.", e);
         }
     }
 
@@ -151,14 +151,14 @@
 
     @Override
     public JobHealth getHealth(JobHealth jobHealth, AbstractJob job)
-            throws SchedulerException {
+        throws SchedulerException {
         List<? extends Trigger> triggers = jobService
-                .getTriggers(job.getName(), job.getGroup());
+            .getTriggers(job.getName(), job.getGroup());
         if (!CollectionUtils.isEmpty(triggers)) {
             jobHealth.setJobCount(jobHealth.getJobCount() + 1);
             if (jobService.isJobHealthy(job.getId())) {
                 jobHealth.setHealthyJobCount(
-                        jobHealth.getHealthyJobCount() + 1);
+                    jobHealth.getHealthyJobCount() + 1);
             }
         }
         return jobHealth;
@@ -166,7 +166,7 @@
 
     @Override
     public JobState getState(AbstractJob job, String action)
-            throws SchedulerException {
+        throws SchedulerException {
         JobState jobState = new JobState();
         Scheduler scheduler = factory.getScheduler();
         if (job.getGroup() == null || job.getName() == null) {
@@ -182,9 +182,9 @@
     }
 
     private void setTriggerTime(AbstractJob job, JobState jobState)
-            throws SchedulerException {
+        throws SchedulerException {
         List<? extends Trigger> triggers = jobService
-                .getTriggers(job.getName(), job.getGroup());
+            .getTriggers(job.getName(), job.getGroup());
         // If triggers are empty, in Griffin it means job is completed whose
         // trigger state is NONE or not scheduled.
         if (CollectionUtils.isEmpty(triggers)) {
@@ -194,9 +194,9 @@
         Date nextFireTime = trigger.getNextFireTime();
         Date previousFireTime = trigger.getPreviousFireTime();
         jobState.setNextFireTime(nextFireTime != null ?
-                nextFireTime.getTime() : -1);
+            nextFireTime.getTime() : -1);
         jobState.setPreviousFireTime(previousFireTime != null ?
-                previousFireTime.getTime() : -1);
+            previousFireTime.getTime() : -1);
     }
 
     /**
@@ -225,7 +225,7 @@
     private TriggerState getTriggerState(String name, String group) {
         try {
             List<? extends Trigger> triggers = jobService.getTriggers(name,
-                    group);
+                group);
             if (CollectionUtils.isEmpty(triggers)) {
                 return null;
             }
@@ -234,7 +234,7 @@
         } catch (SchedulerException e) {
             LOGGER.error("Failed to delete job", e);
             throw new GriffinException
-                    .ServiceException("Failed to delete job", e);
+                .ServiceException("Failed to delete job", e);
         }
 
     }
@@ -254,7 +254,7 @@
         } catch (Exception e) {
             LOGGER.error("Job schedule happens exception.", e);
             throw new GriffinException.ServiceException("Job schedule " +
-                    "happens exception.", e);
+                "happens exception.", e);
         }
     }
 
@@ -263,7 +263,7 @@
         for (JobInstanceBean instance : instances) {
             if (!instance.isPredicateDeleted()) {
                 deleteJob(instance.getPredicateGroup(), instance
-                        .getPredicateName());
+                    .getPredicateName());
                 instance.setPredicateDeleted(true);
                 if (instance.getState().equals(LivySessionStates.State.FINDING)) {
                     instance.setState(LivySessionStates.State.NOT_FOUND);
@@ -277,7 +277,7 @@
         JobKey jobKey = new JobKey(name, group);
         if (!scheduler.checkExists(jobKey)) {
             LOGGER.info("Job({},{}) does not exist.", jobKey.getGroup(), jobKey
-                    .getName());
+                .getName());
             return;
         }
         scheduler.deleteJob(jobKey);
@@ -292,9 +292,9 @@
         JobKey jobKey = new JobKey(name, group);
         if (!scheduler.checkExists(jobKey)) {
             LOGGER.warn("Job({},{}) does not exist.", jobKey.getGroup(), jobKey
-                    .getName());
+                .getName());
             throw new GriffinException.NotFoundException
-                    (JOB_KEY_DOES_NOT_EXIST);
+                (JOB_KEY_DOES_NOT_EXIST);
         }
         scheduler.pauseJob(jobKey);
     }
@@ -326,7 +326,7 @@
             }
         } catch (SchedulerException e) {
             LOGGER.error("Failed to pause predicate job({},{}).", pGroup,
-                    pName);
+                pName);
             status = false;
         }
         return status;
@@ -338,16 +338,16 @@
         }
         if (!isValidCronExpression(job.getCronExpression())) {
             throw new GriffinException.BadRequestException
-                    (INVALID_CRON_EXPRESSION);
+                (INVALID_CRON_EXPRESSION);
         }
         if (!isValidBaseLine(job.getSegments())) {
             throw new GriffinException.BadRequestException
-                    (MISSING_BASELINE_CONFIG);
+                (MISSING_BASELINE_CONFIG);
         }
         List<String> names = getConnectorNames(measure);
         if (!isValidConnectorNames(job.getSegments(), names)) {
             throw new GriffinException.BadRequestException
-                    (INVALID_CONNECTOR_NAME);
+                (INVALID_CONNECTOR_NAME);
         }
     }
 
@@ -371,7 +371,7 @@
             }
         }
         LOGGER.warn("Please set segment timestamp baseline " +
-                "in as.baseline field.");
+            "in as.baseline field.");
         return false;
     }
 
@@ -383,16 +383,16 @@
             String dcName = segment.getDataConnectorName();
             sets.add(dcName);
             boolean exist = names.stream().anyMatch(name -> name.equals
-                    (dcName));
+                (dcName));
             if (!exist) {
                 LOGGER.warn("Param {} is a illegal string. " +
-                        "Please input one of strings in {}.", dcName, names);
+                    "Please input one of strings in {}.", dcName, names);
                 return false;
             }
         }
         if (sets.size() < segments.size()) {
             LOGGER.warn("Connector names in job data segment " +
-                    "cannot duplicate.");
+                "cannot duplicate.");
             return false;
         }
         return true;
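
The validateParams logic above rejects a job whose cron expression is not parseable, whose segments lack an as.baseline entry, or whose connector names do not match the measure. A client can pre-check the cron expression locally before calling the API; the sketch below uses Quartz's CronExpression.isValidExpression, which may or may not be exactly what Griffin's isValidCronExpression wraps:

    // Sketch: pre-validating a job cron expression with Quartz before submitting
    // a job definition. Griffin's isValidCronExpression is its own helper and is
    // not shown in this diff, so this is only an approximation of that check.
    import org.quartz.CronExpression;

    public class CronPreCheck {
        public static void main(String[] args) {
            String cron = "0 0/10 * * * ?";   // every 10 minutes, Quartz cron syntax
            System.out.println(CronExpression.isValidExpression(cron)
                ? "cron accepted: " + cron
                : "cron rejected: " + cron);
        }
    }
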
diff --git a/service/src/main/java/org/apache/griffin/core/job/FileExistPredicator.java b/service/src/main/java/org/apache/griffin/core/job/FileExistPredicator.java
index 703a837..1a9209b 100644
--- a/service/src/main/java/org/apache/griffin/core/job/FileExistPredicator.java
+++ b/service/src/main/java/org/apache/griffin/core/job/FileExistPredicator.java
@@ -33,7 +33,7 @@
 
 public class FileExistPredicator implements Predicator {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(FileExistPredicator.class);
+        .getLogger(FileExistPredicator.class);
 
     private static final String PREDICT_PATH = "path";
     private static final String PREDICT_ROOT_PATH = "root.path";
@@ -49,15 +49,14 @@
         Map<String, Object> config = predicate.getConfigMap();
         String[] paths = null;
         String rootPath = null;
-        if (config != null && !StringUtils.isEmpty((String) config.get
-                (PREDICT_PATH))) {
-            paths = ((String) config.get(PREDICT_PATH)).split
-                    (PATH_CONNECTOR_CHARACTER);
+        if (config != null && !StringUtils.isEmpty((String) config.get(PREDICT_PATH))) {
+            paths = ((String) config.get(PREDICT_PATH))
+                .split(PATH_CONNECTOR_CHARACTER);
             rootPath = (String) config.get(PREDICT_ROOT_PATH);
         }
         if (ArrayUtils.isEmpty(paths) || StringUtils.isEmpty(rootPath)) {
             LOGGER.error("Predicate path is null.Please check predicates " +
-                    "config root.path and path.");
+                "config root.path and path.");
             throw new NullPointerException();
         }
         for (String path : paths) {
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobController.java b/service/src/main/java/org/apache/griffin/core/job/JobController.java
index f4ee791..3b52ee1 100644
--- a/service/src/main/java/org/apache/griffin/core/job/JobController.java
+++ b/service/src/main/java/org/apache/griffin/core/job/JobController.java
@@ -19,7 +19,9 @@
 
 package org.apache.griffin.core.job;
 
+import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.griffin.core.job.entity.AbstractJob;
 import org.apache.griffin.core.job.entity.JobHealth;
@@ -49,7 +51,7 @@
 
     @RequestMapping(value = "/jobs", method = RequestMethod.GET)
     public List<AbstractJob> getJobs(@RequestParam(value = "type",
-            defaultValue = "") String type) {
+        defaultValue = "") String type) {
         return jobService.getAliveJobs(type);
     }
 
@@ -67,33 +69,38 @@
     @RequestMapping(value = "/jobs/{id}", method = RequestMethod.PUT)
     @ResponseStatus(HttpStatus.OK)
     public AbstractJob onActions(
-            @PathVariable("id") Long jobId,
-            @RequestParam String action) throws Exception {
+        @PathVariable("id") Long jobId,
+        @RequestParam String action) throws Exception {
         return jobService.onAction(jobId, action);
     }
 
     @RequestMapping(value = "/jobs", method = RequestMethod.DELETE)
     @ResponseStatus(HttpStatus.NO_CONTENT)
     public void deleteJob(@RequestParam("jobName") String jobName)
-            throws SchedulerException {
+        throws SchedulerException {
         jobService.deleteJob(jobName);
     }
 
     @RequestMapping(value = "/jobs/{id}", method = RequestMethod.DELETE)
     @ResponseStatus(HttpStatus.NO_CONTENT)
     public void deleteJob(@PathVariable("id") Long id)
-            throws SchedulerException {
+        throws SchedulerException {
         jobService.deleteJob(id);
     }
 
     @RequestMapping(value = "/jobs/instances", method = RequestMethod.GET)
     public List<JobInstanceBean> findInstancesOfJob(
-            @RequestParam("jobId") Long id,
-            @RequestParam("page") int page,
-            @RequestParam("size") int size) {
+        @RequestParam("jobId") Long id,
+        @RequestParam("page") int page,
+        @RequestParam("size") int size) {
         return jobService.findInstancesOfJob(id, page, size);
     }
 
+    @RequestMapping(value = "/jobs/instances/{instanceId}", method = RequestMethod.GET)
+    public JobInstanceBean findInstanceByInstanceId(@PathVariable("instanceId") Long id) {
+        return jobService.findInstance(id);
+    }
+
     @RequestMapping(value = "/jobs/health", method = RequestMethod.GET)
     public JobHealth getHealthInfo() {
         return jobService.getHealthInfo();
@@ -108,9 +115,20 @@
         InputStreamResource resource = new InputStreamResource(
             FSUtil.getMissSampleInputStream(path));
         return ResponseEntity.ok().
-                header("content-disposition",
-                        "attachment; filename = sampleMissingData.json")
-                .contentType(MediaType.APPLICATION_OCTET_STREAM)
-                .body(resource);
+            header("content-disposition",
+                "attachment; filename = sampleMissingData.json")
+            .contentType(MediaType.APPLICATION_OCTET_STREAM)
+            .body(resource);
+    }
+
+    @RequestMapping(value = "/jobs/trigger/{id}", method = RequestMethod.POST)
+    @ResponseStatus(HttpStatus.OK)
+    public Map<String, Object> triggerJob(@PathVariable("id") Long id, @RequestBody(required = false) String request) throws SchedulerException {
+        return Collections.singletonMap("triggerKey", jobService.triggerJobById(id));
+    }
+
+    @RequestMapping(value = "jobs/triggerKeys/{triggerKey:.+}", method = RequestMethod.GET)
+    public List<JobInstanceBean> findInstanceByTriggerKey(@PathVariable("triggerKey") String triggerKey) {
+        return jobService.findInstancesByTriggerKey(triggerKey);
     }
 }
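
The two endpoints added above form a pair: POST /jobs/trigger/{id} fires the job immediately and returns the generated Quartz trigger key, while GET /jobs/triggerKeys/{triggerKey} returns the instances recorded under that key. A hedged client sketch follows; the base URL is an assumption, whereas the paths and the {"triggerKey": ...} response shape come from the controller code above:

    // Illustrative client for the new trigger endpoints. Host, port and base
    // path are assumptions; the endpoint paths are taken from JobController.
    import java.util.Map;
    import org.springframework.web.client.RestTemplate;

    public class TriggerJobClient {
        public static void main(String[] args) {
            RestTemplate rest = new RestTemplate();
            String base = "http://localhost:8080/api/v1";   // assumed service base path

            // Fire job 42 now; the response body is {"triggerKey": "<group>.<name>"}.
            Map<?, ?> resp = rest.postForObject(base + "/jobs/trigger/42", null, Map.class);
            String triggerKey = (String) resp.get("triggerKey");

            // Fetch the job instances recorded for that trigger key.
            Object instances = rest.getForObject(base + "/jobs/triggerKeys/" + triggerKey, Object.class);
            System.out.println(instances);
        }
    }
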
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobInstance.java b/service/src/main/java/org/apache/griffin/core/job/JobInstance.java
index 47b42f3..3b0f768 100644
--- a/service/src/main/java/org/apache/griffin/core/job/JobInstance.java
+++ b/service/src/main/java/org/apache/griffin/core/job/JobInstance.java
@@ -40,7 +40,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.TimeZone;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.griffin.core.exception.GriffinException;
@@ -79,16 +78,17 @@
 @DisallowConcurrentExecution
 public class JobInstance implements Job {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(JobInstance.class);
+        .getLogger(JobInstance.class);
     public static final String MEASURE_KEY = "measure";
     public static final String PREDICATES_KEY = "predicts";
     public static final String PREDICATE_JOB_NAME = "predicateJobName";
+    private static final String TRIGGER_KEY = "trigger";
     static final String JOB_NAME = "jobName";
     static final String PATH_CONNECTOR_CHARACTER = ",";
     public static final String INTERVAL = "interval";
     public static final String REPEAT = "repeat";
     public static final String CHECK_DONEFILE_SCHEDULE =
-            "checkdonefile.schedule";
+        "checkdonefile.schedule";
 
     @Autowired
     private SchedulerFactoryBean factory;
@@ -119,7 +119,7 @@
     }
 
     private void initParam(JobExecutionContext context)
-            throws SchedulerException {
+        throws SchedulerException {
         mPredicates = new ArrayList<>();
         JobDetail jobDetail = context.getJobDetail();
         Long jobId = jobDetail.getJobDataMap().getLong(GRIFFIN_JOB_ID);
@@ -127,22 +127,25 @@
         Long measureId = job.getMeasureId();
         measure = measureRepo.findOne(measureId);
         setJobStartTime(jobDetail);
+        if (job.getConfigMap() == null) {
+            job.setConfigMap(new HashMap<>());
+        }
+        job.getConfigMap().put(TRIGGER_KEY, context.getTrigger().getKey().toString());
     }
 
     @SuppressWarnings("unchecked")
     private void setJobStartTime(JobDetail jobDetail)
-            throws SchedulerException {
+        throws SchedulerException {
         Scheduler scheduler = factory.getScheduler();
         JobKey jobKey = jobDetail.getKey();
         List<Trigger> triggers =
-                (List<Trigger>) scheduler.getTriggersOfJob(jobKey);
+            (List<Trigger>) scheduler.getTriggersOfJob(jobKey);
         Date triggerTime = triggers.get(0).getPreviousFireTime();
         jobStartTime = triggerTime.getTime();
     }
 
-
     private void setSourcesPartitionsAndPredicates(List<DataSource> sources)
-            throws Exception {
+        throws Exception {
         boolean isFirstBaseline = true;
         for (JobDataSegment jds : job.getSegments()) {
             if (jds.isAsTsBaseline() && isFirstBaseline) {
@@ -158,17 +161,16 @@
     }
 
     private void setDataSourcePartitions(JobDataSegment jds, DataSource ds)
-            throws Exception {
+        throws Exception {
         List<DataConnector> connectors = ds.getConnectors();
         for (DataConnector dc : connectors) {
             setDataConnectorPartitions(jds, dc);
         }
     }
 
-
     private void setDataConnectorPartitions(
-            JobDataSegment jds,
-            DataConnector dc) throws Exception {
+        JobDataSegment jds,
+        DataConnector dc) throws Exception {
         String dcName = jds.getDataConnectorName();
         if (dcName.equals(dc.getName())) {
             Long[] sampleTs = genSampleTs(jds.getSegmentRange(), dc);
@@ -189,7 +191,7 @@
         Long range = TimeUtil.str2Long(segRange.getLength());
         String unit = dc.getDataUnit();
         Long dataUnit = TimeUtil.str2Long(StringUtils.isEmpty(unit) ? dc
-                .getDefaultDataUnit() : unit);
+            .getDefaultDataUnit() : unit);
         //offset usually is negative
         Long dataStartTime = jobStartTime + offset;
         if (range < 0) {
@@ -217,8 +219,8 @@
         List<SegmentPredicate> predicates = dc.getPredicates();
         for (SegmentPredicate predicate : predicates) {
             genConfMap(predicate.getConfigMap(),
-                    sampleTs,
-                    dc.getDataTimeZone());
+                sampleTs,
+                dc.getDataTimeZone());
             //Do not forget to update origin string config
             predicate.setConfigMap(predicate.getConfigMap());
             mPredicates.add(predicate);
@@ -230,7 +232,6 @@
         dc.setConfigMap(dc.getConfigMap());
     }
 
-
     /**
      * @param conf     config map
      * @param sampleTs collection of data split start timestamp
@@ -240,9 +241,8 @@
      * or like {"path": "/year=2017/month=11/dt=15/hour=09/_DONE
      * ,/year=2017/month=11/dt=15/hour=10/_DONE"}
      */
-
     private void genConfMap(Map<String, Object> conf, Long[] sampleTs, String
-            timezone) {
+        timezone) {
         if (conf == null) {
             LOGGER.warn("Predicate config is null.");
             return;
@@ -257,38 +257,38 @@
                 }
                 for (Long timestamp : sampleTs) {
                     set.add(TimeUtil.format(value, timestamp,
-                            TimeUtil.getTimeZone(timezone)));
+                        TimeUtil.getTimeZone(timezone)));
                 }
                 conf.put(entry.getKey(), StringUtils.join(set,
-                        PATH_CONNECTOR_CHARACTER));
+                    PATH_CONNECTOR_CHARACTER));
             }
         }
     }
 
     @SuppressWarnings("unchecked")
     private void createJobInstance(Map<String, Object> confMap)
-            throws Exception {
+        throws Exception {
         confMap = checkConfMap(confMap != null ? confMap : new HashMap<>());
         Map<String, Object> config = (Map<String, Object>) confMap
-                .get(CHECK_DONEFILE_SCHEDULE);
+            .get(CHECK_DONEFILE_SCHEDULE);
         Long interval = TimeUtil.str2Long((String) config.get(INTERVAL));
         Integer repeat = Integer.valueOf(config.get(REPEAT).toString());
         String groupName = "PG";
-        String jobName = job.getJobName() + "_predicate_" + System
-                .currentTimeMillis();
+        String jobName = job.getJobName() + "_predicate_"
+            + System.currentTimeMillis();
         TriggerKey tk = triggerKey(jobName, groupName);
         if (factory.getScheduler().checkExists(tk)) {
-            throw new GriffinException.ConflictException
-                    (QUARTZ_JOB_ALREADY_EXIST);
+            throw new GriffinException.ConflictException(QUARTZ_JOB_ALREADY_EXIST);
         }
-        saveJobInstance(jobName, groupName);
+        String triggerKey = (String) confMap.get(TRIGGER_KEY);
+        saveJobInstance(jobName, groupName, triggerKey);
         createJobInstance(tk, interval, repeat, jobName);
     }
 
     @SuppressWarnings("unchecked")
     Map<String, Object> checkConfMap(Map<String, Object> confMap) {
         Map<String, Object> config = (Map<String, Object>) confMap.get
-                (CHECK_DONEFILE_SCHEDULE);
+            (CHECK_DONEFILE_SCHEDULE);
         String interval = env.getProperty("predicate.job.interval");
         interval = interval != null ? interval : "5m";
         String repeat = env.getProperty("predicate.job.repeat.count");
@@ -309,39 +309,38 @@
         return confMap;
     }
 
-    private void saveJobInstance(String pName, String pGroup) {
+    private void saveJobInstance(String pName, String pGroup, String triggerKey) {
         ProcessType type = measure.getProcessType() == BATCH ? BATCH :
-                STREAMING;
+            STREAMING;
         Long tms = System.currentTimeMillis();
         String expired = env.getProperty("jobInstance.expired.milliseconds");
         Long expireTms = Long.valueOf(expired != null ? expired : "604800000")
-                + tms;
+            + tms;
         JobInstanceBean instance = new JobInstanceBean(FINDING, pName, pGroup,
-                tms, expireTms, type);
+            tms, expireTms, type);
         instance.setJob(job);
+        instance.setTriggerKey(triggerKey);
         instanceRepo.save(instance);
     }
 
-
     private void createJobInstance(TriggerKey tk, Long interval, Integer
-            repeatCount, String pJobName) throws Exception {
+        repeatCount, String pJobName) throws Exception {
         JobDetail jobDetail = addJobDetail(tk, pJobName);
         Trigger trigger = genTriggerInstance(tk, jobDetail, interval,
-                repeatCount);
+            repeatCount);
         factory.getScheduler().scheduleJob(trigger);
     }
 
-
     private Trigger genTriggerInstance(TriggerKey tk, JobDetail jd, Long
-            interval, Integer repeatCount) {
+        interval, Integer repeatCount) {
         return newTrigger().withIdentity(tk).forJob(jd).startNow()
-                .withSchedule(simpleSchedule().withIntervalInMilliseconds
-                        (interval).withRepeatCount(repeatCount))
-                .build();
+            .withSchedule(simpleSchedule().withIntervalInMilliseconds
+                (interval).withRepeatCount(repeatCount))
+            .build();
     }
 
     private JobDetail addJobDetail(TriggerKey tk, String pJobName)
-            throws SchedulerException, IOException {
+        throws SchedulerException, IOException {
         Scheduler scheduler = factory.getScheduler();
         JobKey jobKey = jobKey(tk.getName(), tk.getGroup());
         JobDetail jobDetail;
@@ -350,9 +349,9 @@
             jobDetail = scheduler.getJobDetail(jobKey);
         } else {
             jobDetail = newJob(SparkSubmitJob.class)
-                    .storeDurably()
-                    .withIdentity(jobKey)
-                    .build();
+                .storeDurably()
+                .withIdentity(jobKey)
+                .build();
         }
         setJobDataMap(jobDetail, pJobName);
         scheduler.addJob(jobDetail, isJobKeyExist);
@@ -360,7 +359,7 @@
     }
 
     private void setJobDataMap(JobDetail jobDetail, String pJobName)
-            throws IOException {
+        throws IOException {
         JobDataMap dataMap = jobDetail.getJobDataMap();
         preProcessMeasure();
         String result = toJson(measure);
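
Within JobInstance above, genConfMap expands each predicate config value once per sample timestamp (via TimeUtil.format with the connector's time zone) and joins the results with the ',' path connector, producing done-file paths like those shown in its Javadoc. A simplified, self-contained illustration of that expansion follows; it uses a plain SimpleDateFormat pattern because Griffin's TimeUtil template syntax is not shown in this diff:

    // Simplified illustration of the per-timestamp path expansion done by
    // genConfMap. Griffin's TimeUtil has its own template format; a plain
    // date pattern is used here purely for demonstration.
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.LinkedHashSet;
    import java.util.Set;
    import java.util.TimeZone;

    public class PathExpansionSketch {
        public static void main(String[] args) {
            // 2017-11-15 09:00 and 10:00 GMT, i.e. two hourly data splits
            long[] sampleTs = {1510736400000L, 1510740000000L};
            SimpleDateFormat fmt =
                new SimpleDateFormat("'/year='yyyy'/month='MM'/dt='dd'/hour='HH'/_DONE'");
            fmt.setTimeZone(TimeZone.getTimeZone("GMT"));

            Set<String> paths = new LinkedHashSet<>();  // de-duplicates, like the Set in genConfMap
            for (long ts : sampleTs) {
                paths.add(fmt.format(new Date(ts)));
            }
            // Joined with the same ',' used as PATH_CONNECTOR_CHARACTER, yielding
            // /year=2017/month=11/dt=15/hour=09/_DONE,/year=2017/month=11/dt=15/hour=10/_DONE
            System.out.println(String.join(",", paths));
        }
    }
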
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobOperator.java b/service/src/main/java/org/apache/griffin/core/job/JobOperator.java
index 81c3b17..05279f9 100644
--- a/service/src/main/java/org/apache/griffin/core/job/JobOperator.java
+++ b/service/src/main/java/org/apache/griffin/core/job/JobOperator.java
@@ -27,7 +27,7 @@
 
 public interface JobOperator {
     AbstractJob add(AbstractJob job, GriffinMeasure measure)
-            throws Exception;
+        throws Exception;
 
     void start(AbstractJob job) throws Exception;
 
@@ -36,8 +36,8 @@
     void delete(AbstractJob job) throws SchedulerException;
 
     JobHealth getHealth(JobHealth jobHealth, AbstractJob job)
-            throws SchedulerException;
+        throws SchedulerException;
 
     JobState getState(AbstractJob job, String action)
-            throws SchedulerException;
+        throws SchedulerException;
 }
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobService.java b/service/src/main/java/org/apache/griffin/core/job/JobService.java
index 58e541f..d3492e8 100644
--- a/service/src/main/java/org/apache/griffin/core/job/JobService.java
+++ b/service/src/main/java/org/apache/griffin/core/job/JobService.java
@@ -42,7 +42,13 @@
 
     List<JobInstanceBean> findInstancesOfJob(Long jobId, int page, int size);
 
+    List<JobInstanceBean> findInstancesByTriggerKey(String triggerKey);
+
     JobHealth getHealthInfo();
 
     String getJobHdfsSinksPath(String jobName, long timestamp);
+
+    JobInstanceBean findInstance(Long id);
+
+    String triggerJobById(Long id) throws SchedulerException;
 }
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobServiceImpl.java b/service/src/main/java/org/apache/griffin/core/job/JobServiceImpl.java
index 69f965f..f41eb2a 100644
--- a/service/src/main/java/org/apache/griffin/core/job/JobServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/job/JobServiceImpl.java
@@ -19,12 +19,47 @@
 
 package org.apache.griffin.core.job;
 
+import static java.util.TimeZone.getTimeZone;
+import static org.apache.griffin.core.config.EnvConfig.ENV_BATCH;
+import static org.apache.griffin.core.config.EnvConfig.ENV_STREAMING;
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.INSTANCE_ID_DOES_NOT_EXIST;
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_MEASURE_ID;
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_ID_DOES_NOT_EXIST;
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_NAME_DOES_NOT_EXIST;
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_TYPE_DOES_NOT_SUPPORT;
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.MEASURE_TYPE_DOES_NOT_SUPPORT;
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.NO_SUCH_JOB_ACTION;
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.QUARTZ_JOB_ALREADY_EXIST;
+import static org.apache.griffin.core.job.entity.LivySessionStates.State.BUSY;
+import static org.apache.griffin.core.job.entity.LivySessionStates.State.DEAD;
+import static org.apache.griffin.core.job.entity.LivySessionStates.State.IDLE;
+import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_STARTED;
+import static org.apache.griffin.core.job.entity.LivySessionStates.State.RECOVERING;
+import static org.apache.griffin.core.job.entity.LivySessionStates.State.RUNNING;
+import static org.apache.griffin.core.job.entity.LivySessionStates.State.STARTING;
+import static org.apache.griffin.core.job.entity.LivySessionStates.State.SUCCESS;
+import static org.apache.griffin.core.job.entity.LivySessionStates.State.UNKNOWN;
+import static org.apache.griffin.core.job.entity.LivySessionStates.isActive;
+import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.BATCH;
+import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.STREAMING;
+import static org.quartz.CronScheduleBuilder.cronSchedule;
+import static org.quartz.JobBuilder.newJob;
+import static org.quartz.JobKey.jobKey;
+import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
+import static org.quartz.TriggerBuilder.newTrigger;
+import static org.quartz.TriggerKey.triggerKey;
+
 import com.fasterxml.jackson.core.type.TypeReference;
 
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.TimeZone;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.griffin.core.event.GriffinEventManager;
-import org.apache.griffin.core.exception.GriffinException;
 import org.apache.griffin.core.event.JobEvent;
+import org.apache.griffin.core.exception.GriffinException;
 import org.apache.griffin.core.job.entity.AbstractJob;
 import org.apache.griffin.core.job.entity.BatchJob;
 import org.apache.griffin.core.job.entity.JobHealth;
@@ -66,40 +101,6 @@
 import org.springframework.util.CollectionUtils;
 import org.springframework.web.client.HttpClientErrorException;
 import org.springframework.web.client.ResourceAccessException;
-import org.springframework.web.client.RestTemplate;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.TimeZone;
-
-import static java.util.TimeZone.getTimeZone;
-import static org.apache.griffin.core.config.EnvConfig.ENV_BATCH;
-import static org.apache.griffin.core.config.EnvConfig.ENV_STREAMING;
-import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_MEASURE_ID;
-import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_ID_DOES_NOT_EXIST;
-import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_NAME_DOES_NOT_EXIST;
-import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_TYPE_DOES_NOT_SUPPORT;
-import static org.apache.griffin.core.exception.GriffinExceptionMessage.MEASURE_TYPE_DOES_NOT_SUPPORT;
-import static org.apache.griffin.core.exception.GriffinExceptionMessage.NO_SUCH_JOB_ACTION;
-import static org.apache.griffin.core.exception.GriffinExceptionMessage.QUARTZ_JOB_ALREADY_EXIST;
-import static org.apache.griffin.core.job.entity.LivySessionStates.State.BUSY;
-import static org.apache.griffin.core.job.entity.LivySessionStates.State.DEAD;
-import static org.apache.griffin.core.job.entity.LivySessionStates.State.IDLE;
-import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_STARTED;
-import static org.apache.griffin.core.job.entity.LivySessionStates.State.RECOVERING;
-import static org.apache.griffin.core.job.entity.LivySessionStates.State.RUNNING;
-import static org.apache.griffin.core.job.entity.LivySessionStates.State.STARTING;
-import static org.apache.griffin.core.job.entity.LivySessionStates.State.UNKNOWN;
-import static org.apache.griffin.core.job.entity.LivySessionStates.isActive;
-import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.BATCH;
-import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.STREAMING;
-import static org.quartz.CronScheduleBuilder.cronSchedule;
-import static org.quartz.JobBuilder.newJob;
-import static org.quartz.JobKey.jobKey;
-import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
-import static org.quartz.TriggerBuilder.newTrigger;
-import static org.quartz.TriggerKey.triggerKey;
 
 @Service
 public class JobServiceImpl implements JobService {
@@ -131,11 +132,11 @@
     private StreamingJobOperatorImpl streamingJobOp;
     @Autowired
     private GriffinEventManager eventManager;
-
-    private RestTemplate restTemplate;
+    @Autowired
+    private LivyTaskSubmitHelper livyTaskSubmitHelper;
 
     public JobServiceImpl() {
-        restTemplate = new RestTemplate();
+
     }
 
     @Override
@@ -284,6 +285,17 @@
         return updateState(instances);
     }
 
+    @Override
+    public JobInstanceBean findInstance(Long id) {
+        JobInstanceBean bean = instanceRepo.findByInstanceId(id);
+        if (bean == null) {
+            LOGGER.warn("Instance id {} does not exist.", id);
+            throw new GriffinException
+                .NotFoundException(INSTANCE_ID_DOES_NOT_EXIST);
+        }
+        return bean;
+    }
+
     private List<JobInstanceBean> updateState(List<JobInstanceBean> instances) {
         for (JobInstanceBean instance : instances) {
             State state = instance.getState();
@@ -294,6 +306,11 @@
         return instances;
     }
 
+    @Override
+    public List<JobInstanceBean> findInstancesByTriggerKey(String triggerKey) {
+        return instanceRepo.findByTriggerKey(triggerKey);
+    }
+
     /**
      * a job is regard as healthy job when its latest instance is in healthy
      * state.
@@ -518,7 +535,9 @@
             new TypeReference<HashMap<String, Object>>() {
             };
         try {
-            String resultStr = restTemplate.getForObject(uri, String.class);
+            String resultStr = livyTaskSubmitHelper.getFromLivy(uri);
+            LOGGER.info(resultStr);
+
             HashMap<String, Object> resultMap = JsonUtil.toEntity(resultStr,
                 type);
             setJobInstanceIdAndUri(instance, resultMap);
@@ -529,6 +548,7 @@
             LOGGER.warn("sessionId({}) appId({}) {}.", instance.getSessionId(),
                 instance.getAppId(), e.getMessage());
             setStateByYarn(instance, e);
+            livyTaskSubmitHelper.decreaseCurTaskNum(instance.getId());
         } catch (Exception e) {
             LOGGER.error(e.getMessage());
         }
@@ -606,6 +626,10 @@
             instance.setAppUri(appId == null ? null : env
                 .getProperty("yarn.uri") + "/cluster/app/" + appId);
             instanceRepo.save(instance);
+            // If Livy reports SUCCESS or DEAD, this task has finished; decrement the running task count.
+            if (SUCCESS.equals(state) || DEAD.equals(state)) {
+                livyTaskSubmitHelper.decreaseCurTaskNum(instance.getId());
+            }
         }
     }
 
@@ -652,4 +676,22 @@
             return null;
         }
     }
+
+    @Override
+    public String triggerJobById(Long id) throws SchedulerException {
+        AbstractJob job = jobRepo.findByIdAndDeleted(id, false);
+        validateJobExist(job);
+        Scheduler scheduler = factory.getScheduler();
+        JobKey jobKey = jobKey(job.getName(), job.getGroup());
+        if (scheduler.checkExists(jobKey)) {
+            Trigger trigger = TriggerBuilder.newTrigger()
+                .forJob(jobKey)
+                .startNow()
+                .build();
+            scheduler.scheduleJob(trigger);
+            return trigger.getKey().toString();
+        } else {
+            throw new GriffinException.NotFoundException(JOB_ID_DOES_NOT_EXIST);
+        }
+    }
 }
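
One detail ties the new trigger flow together: triggerJobById returns trigger.getKey().toString(), and JobInstance stores context.getTrigger().getKey().toString(), which saveJobInstance then sets on JobInstanceBean, so findInstancesByTriggerKey can match the two. Quartz renders a key as group.name; a tiny sketch with illustrative names:

    // Quartz trigger keys stringify as "<group>.<name>"; that string is what
    // triggerJobById returns and what JobInstanceBean stores, so
    // findInstancesByTriggerKey can match on it. Names below are illustrative.
    import org.quartz.TriggerKey;

    public class TriggerKeyFormat {
        public static void main(String[] args) {
            TriggerKey key = new TriggerKey("myTrigger", "DEFAULT");  // name, group
            System.out.println(key);   // prints: DEFAULT.myTrigger
        }
    }
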
diff --git a/service/src/main/java/org/apache/griffin/core/job/LivyTaskSubmitHelper.java b/service/src/main/java/org/apache/griffin/core/job/LivyTaskSubmitHelper.java
new file mode 100644
index 0000000..cf50527
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/job/LivyTaskSubmitHelper.java
@@ -0,0 +1,322 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.job;
+
+import static org.apache.griffin.core.config.PropertiesConfig.livyConfMap;
+import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_FOUND;
+import static org.apache.griffin.core.util.JsonUtil.toEntity;
+import static org.apache.griffin.core.util.JsonUtil.toJsonWithFormat;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicInteger;
+import javax.annotation.PostConstruct;
+
+import org.apache.commons.collections.map.HashedMap;
+import org.quartz.JobDetail;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.core.env.Environment;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.security.kerberos.client.KerberosRestTemplate;
+import org.springframework.stereotype.Component;
+import org.springframework.web.client.RestTemplate;
+
+@Component
+public class LivyTaskSubmitHelper {
+    private static final Logger LOGGER = LoggerFactory.getLogger(LivyTaskSubmitHelper.class);
+    private static final String REQUEST_BY_HEADER = "X-Requested-By";
+    public static final int DEFAULT_QUEUE_SIZE = 20000;
+    private static final int SLEEP_TIME = 300;
+
+    private SparkSubmitJob sparkSubmitJob;
+    private ConcurrentMap<Long, Integer> taskAppIdMap = new ConcurrentHashMap<>();
+    // Current number of tasks
+    private AtomicInteger curConcurrentTaskNum = new AtomicInteger(0);
+    private String workerNamePre;
+    private RestTemplate restTemplate = new RestTemplate();
+    // queue of pending job details consumed by the submit worker
+    private BlockingQueue<JobDetail> queue;
+    private String uri;
+
+    @Value("${livy.task.max.concurrent.count:20}")
+    private int maxConcurrentTaskCount;
+    @Value("${livy.task.submit.interval.second:3}")
+    private int batchIntervalSecond;
+
+    @Autowired
+    private Environment env;
+
+    /**
+     * Initialize related parameters and start the submit worker thread.
+     */
+    @PostConstruct
+    public void init() {
+        startWorker();
+        uri = env.getProperty("livy.uri");
+        LOGGER.info("Livy uri : {}", uri);
+    }
+
+    public LivyTaskSubmitHelper() {
+        this.workerNamePre = "livy-task-submit-worker";
+    }
+
+    /**
+     * Initialize blocking queues and start consumer threads.
+     */
+    public void startWorker() {
+        queue = new LinkedBlockingQueue<>(DEFAULT_QUEUE_SIZE);
+        ExecutorService executorService = Executors.newSingleThreadExecutor();
+        TaskInner taskInner = new TaskInner(executorService);
+        executorService.execute(taskInner);
+    }
+
+    /**
+     * Put job detail into the queue.
+     *
+     * @param jd job detail.
+     */
+    public void addTaskToWaitingQueue(JobDetail jd) throws IOException {
+        if (jd == null) {
+            LOGGER.warn("task is blank, workerNamePre: {}", workerNamePre);
+            return;
+        }
+
+        if (queue.remainingCapacity() <= 0) {
+            LOGGER.warn("task is discard, workerNamePre: {}, task: {}", workerNamePre, jd);
+            sparkSubmitJob.saveJobInstance(null, NOT_FOUND);
+            return;
+        }
+
+        queue.add(jd);
+        LOGGER.info("add_task_to_waiting_queue_success, workerNamePre: {}, task: {}",
+            workerNamePre, jd);
+    }
+
+    /**
+     * Consumer thread.
+     */
+    class TaskInner implements Runnable {
+        private ExecutorService es;
+
+        public TaskInner(ExecutorService es) {
+            this.es = es;
+        }
+
+        public void run() {
+            long insertTime = System.currentTimeMillis();
+            while (true) {
+                try {
+                    if (curConcurrentTaskNum.get() < maxConcurrentTaskCount
+                        && (System.currentTimeMillis() - insertTime) >= batchIntervalSecond * 1000) {
+                        JobDetail jd = queue.take();
+                        sparkSubmitJob.saveJobInstance(jd);
+                        insertTime = System.currentTimeMillis();
+                    } else {
+                        Thread.sleep(SLEEP_TIME);
+                    }
+                } catch (Exception e) {
+                    LOGGER.error("Async_worker_doTask_failed, {}", e.getMessage(), e);
+                    es.execute(this);
+                }
+            }
+        }
+    }
+
+    /**
+     * Add the batch id returned by Livy.
+     *
+     * @param scheduleId livy batch id.
+     */
+    public void increaseCurTaskNum(Long scheduleId) {
+        curConcurrentTaskNum.incrementAndGet();
+        if (scheduleId != null) {
+            taskAppIdMap.put(scheduleId, 1);
+        }
+    }
+
+    /**
+     * Remove tasks after job status updates.
+     *
+     * @param scheduleId livy batch id.
+     */
+    public void decreaseCurTaskNum(Long scheduleId) {
+        if (scheduleId != null && taskAppIdMap.containsKey(scheduleId)) {
+            curConcurrentTaskNum.decrementAndGet();
+            taskAppIdMap.remove(scheduleId);
+        }
+    }
+
+    protected Map<String, Object> retryLivyGetAppId(String result, int appIdRetryCount)
+        throws IOException {
+
+        int retryCount = appIdRetryCount;
+        TypeReference<HashMap<String, Object>> type =
+            new TypeReference<HashMap<String, Object>>() {
+            };
+        Map<String, Object> resultMap = toEntity(result, type);
+
+        if (retryCount <= 0) {
+            return null;
+        }
+
+        if (resultMap.get("appId") != null) {
+            return resultMap;
+        }
+
+        Object livyBatchesId = resultMap.get("id");
+        if (livyBatchesId == null) {
+            return resultMap;
+        }
+
+        while (retryCount-- > 0) {
+            try {
+                Thread.sleep(SLEEP_TIME);
+            } catch (InterruptedException e) {
+                LOGGER.error(e.getMessage(), e);
+            }
+            resultMap = getResultByLivyId(livyBatchesId, type);
+            LOGGER.info("retry get livy resultMap: {}, batches id : {}", resultMap, livyBatchesId);
+
+            if (resultMap.get("appId") != null) {
+                break;
+            }
+        }
+
+        return resultMap;
+    }
+
+    private Map<String, Object> getResultByLivyId(Object livyBatchesId, TypeReference<HashMap<String, Object>> type)
+        throws IOException {
+        Map<String, Object> resultMap = new HashedMap();
+        String livyUri = uri + "/" + livyBatchesId;
+        String result = getFromLivy(livyUri);
+        LOGGER.info(result);
+        return result == null ? resultMap : toEntity(result, type);
+    }
+
+    public String postToLivy(String uri) {
+        LOGGER.info("Post To Livy URI is: " + uri);
+        String needKerberos = env.getProperty("livy.need.kerberos");
+        LOGGER.info("Need Kerberos:" + needKerberos);
+
+        HttpHeaders headers = new HttpHeaders();
+        headers.setContentType(MediaType.APPLICATION_JSON);
+        headers.set(REQUEST_BY_HEADER, "admin");
+
+        if (needKerberos == null || needKerberos.isEmpty()) {
+            LOGGER.error("The property \"livy.need.kerberos\" is empty");
+            return null;
+        }
+
+        if (needKerberos.equalsIgnoreCase("false")) {
+            LOGGER.info("The livy server doesn't need Kerberos Authentication");
+            String result = null;
+            try {
+                HttpEntity<String> springEntity = new HttpEntity<>(toJsonWithFormat(livyConfMap), headers);
+                result = restTemplate.postForObject(uri, springEntity, String.class);
+                LOGGER.info(result);
+            } catch (JsonProcessingException e) {
+                LOGGER.error("Post to livy ERROR. \n {}", e.getMessage());
+            }
+            return result;
+        } else {
+            LOGGER.info("The livy server needs Kerberos Authentication");
+            String userPrincipal = env.getProperty("livy.server.auth.kerberos.principal");
+            String keyTabLocation = env.getProperty("livy.server.auth.kerberos.keytab");
+            LOGGER.info("principal:{}, lcoation:{}", userPrincipal, keyTabLocation);
+
+            KerberosRestTemplate restTemplate = new KerberosRestTemplate(keyTabLocation, userPrincipal);
+            HttpEntity<String> springEntity = null;
+            try {
+                springEntity = new HttpEntity<>(toJsonWithFormat(livyConfMap), headers);
+            } catch (JsonProcessingException e) {
+                LOGGER.error("Json Parsing failed, {}", e.getMessage(), e);
+            }
+            String result = restTemplate.postForObject(uri, springEntity, String.class);
+            LOGGER.info(result);
+            return result;
+        }
+    }
+
+    public String getFromLivy(String uri) {
+        LOGGER.info("Get From Livy URI is: " + uri);
+        String needKerberos = env.getProperty("livy.need.kerberos");
+        LOGGER.info("Need Kerberos:" + needKerberos);
+
+        if (needKerberos == null || needKerberos.isEmpty()) {
+            LOGGER.error("The property \"livy.need.kerberos\" is empty");
+            return null;
+        }
+
+        if (needKerberos.equalsIgnoreCase("false")) {
+            LOGGER.info("The livy server doesn't need Kerberos Authentication");
+            return restTemplate.getForObject(uri, String.class);
+        } else {
+            LOGGER.info("The livy server needs Kerberos Authentication");
+            String userPrincipal = env.getProperty("livy.server.auth.kerberos.principal");
+            String keyTabLocation = env.getProperty("livy.server.auth.kerberos.keytab");
+            LOGGER.info("principal:{}, lcoation:{}", userPrincipal, keyTabLocation);
+
+            KerberosRestTemplate restTemplate = new KerberosRestTemplate(keyTabLocation, userPrincipal);
+            String result = restTemplate.getForObject(uri, String.class);
+            LOGGER.info(result);
+            return result;
+        }
+    }
+
+    public void deleteByLivy(String uri) {
+        LOGGER.info("Delete by Livy URI is: " + uri);
+        String needKerberos = env.getProperty("livy.need.kerberos");
+        LOGGER.info("Need Kerberos:" + needKerberos);
+
+        if (needKerberos == null || needKerberos.isEmpty()) {
+            LOGGER.error("The property \"livy.need.kerberos\" is empty");
+            return;
+        }
+
+        if (needKerberos.equalsIgnoreCase("false")) {
+            LOGGER.info("The livy server doesn't need Kerberos Authentication");
+            new RestTemplate().delete(uri);
+        } else {
+            LOGGER.info("The livy server needs Kerberos Authentication");
+            String userPrincipal = env.getProperty("livy.server.auth.kerberos.principal");
+            String keyTabLocation = env.getProperty("livy.server.auth.kerberos.keytab");
+            LOGGER.info("principal:{}, lcoation:{}", userPrincipal, keyTabLocation);
+
+            KerberosRestTemplate restTemplate = new KerberosRestTemplate(keyTabLocation, userPrincipal);
+            restTemplate.delete(uri);
+        }
+    }
+}
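
The helper above switches between a plain RestTemplate and a KerberosRestTemplate based on the livy.need.kerberos property. A minimal standalone sketch of the same access pattern; the Livy URI, keytab path and principal below are placeholder values, not Griffin defaults:

import org.springframework.security.kerberos.client.KerberosRestTemplate;
import org.springframework.web.client.RestTemplate;

public class LivyClientSketch {
    public static void main(String[] args) {
        // Placeholder values; in the helper these come from the Spring Environment.
        boolean needKerberos = Boolean.parseBoolean(System.getProperty("livy.need.kerberos", "false"));
        String livyUri = "http://localhost:8998/batches";

        String batches;
        if (needKerberos) {
            // Authenticates with the keytab/principal pair before issuing the request.
            KerberosRestTemplate kerberosTemplate =
                new KerberosRestTemplate("/etc/security/keytabs/griffin.keytab", "griffin@EXAMPLE.COM");
            batches = kerberosTemplate.getForObject(livyUri, String.class);
        } else {
            // The Livy server requires no authentication.
            batches = new RestTemplate().getForObject(livyUri, String.class);
        }
        System.out.println(batches);
    }
}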
diff --git a/service/src/main/java/org/apache/griffin/core/job/Predicator.java b/service/src/main/java/org/apache/griffin/core/job/Predicator.java
index dd9e105..153157e 100644
--- a/service/src/main/java/org/apache/griffin/core/job/Predicator.java
+++ b/service/src/main/java/org/apache/griffin/core/job/Predicator.java
@@ -21,6 +21,16 @@
 
 import java.io.IOException;
 
+/**
+ * A Predicator judges whether a given condition is met.
+ * This interface declares a single method, {@link #predicate()}.
+ */
 public interface Predicator {
+    /**
+     * Evaluate the condition.
+     *
+     * @return true if the condition is met, otherwise false
+     * @throws IOException if an I/O error occurs while evaluating the condition
+     */
     boolean predicate() throws IOException;
 }
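
To make the contract concrete, here is a hypothetical Predicator implementation; the class name and the "hour" config key are invented for illustration, but the single-argument SegmentPredicate constructor matches what the "custom" branch of PredicatorFactory (later in this patch) looks up via reflection:

import java.io.IOException;
import java.time.LocalTime;

import org.apache.griffin.core.job.Predicator;
import org.apache.griffin.core.job.entity.SegmentPredicate;

public class AfterHourPredicator implements Predicator {
    private final int hour;

    // PredicatorFactory's "custom" branch looks up a constructor with exactly this signature.
    public AfterHourPredicator(SegmentPredicate segmentPredicate) {
        Object configured = segmentPredicate.getConfigMap().get("hour");
        this.hour = configured == null ? 0 : Integer.parseInt(configured.toString());
    }

    @Override
    public boolean predicate() throws IOException {
        // The condition is met once the local time has passed the configured hour.
        return LocalTime.now().getHour() >= hour;
    }
}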
diff --git a/service/src/main/java/org/apache/griffin/core/job/SparkSubmitJob.java b/service/src/main/java/org/apache/griffin/core/job/SparkSubmitJob.java
index cd0e7e8..e48053d 100644
--- a/service/src/main/java/org/apache/griffin/core/job/SparkSubmitJob.java
+++ b/service/src/main/java/org/apache/griffin/core/job/SparkSubmitJob.java
@@ -57,36 +57,44 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.core.env.Environment;
 import org.springframework.stereotype.Component;
 import org.springframework.util.CollectionUtils;
 import org.springframework.util.StringUtils;
-import org.springframework.web.client.HttpClientErrorException;
-import org.springframework.web.client.RestTemplate;
-import org.springframework.http.HttpEntity;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.MediaType;
-import static org.apache.griffin.core.util.JsonUtil.toJsonWithFormat;
 
+/**
+ * Simple implementation of the Quartz Job interface that submits the
+ * Griffin job to the Spark cluster via Livy.
+ *
+ * @see LivyTaskSubmitHelper#postToLivy(String)
+ * @see Job#execute(JobExecutionContext)
+ */
 @PersistJobDataAfterExecution
 @DisallowConcurrentExecution
 @Component
 public class SparkSubmitJob implements Job {
     private static final Logger LOGGER =
-            LoggerFactory.getLogger(SparkSubmitJob.class);
-    private static final String REQUEST_BY_HEADER = "X-Requested-By";
+        LoggerFactory.getLogger(SparkSubmitJob.class);
+
     @Autowired
     private JobInstanceRepo jobInstanceRepo;
     @Autowired
     private BatchJobOperatorImpl batchJobOp;
     @Autowired
     private Environment env;
+    @Autowired
+    private LivyTaskSubmitHelper livyTaskSubmitHelper;
+
+    @Value("${livy.need.queue:false}")
+    private boolean isNeedLivyQueue;
+    @Value("${livy.task.appId.retry.count:3}")
+    private int appIdRetryCount;
 
     private GriffinMeasure measure;
     private String livyUri;
     private List<SegmentPredicate> mPredicates;
     private JobInstanceBean jobInstance;
-    private RestTemplate restTemplate = new RestTemplate();
 
     @Override
     public void execute(JobExecutionContext context) {
@@ -98,14 +106,19 @@
                 updateJobInstanceState(context);
                 return;
             }
-            saveJobInstance(jd);
+            if (isNeedLivyQueue) {
+                // queue the task to respect the livy concurrent batch limit
+                livyTaskSubmitHelper.addTaskToWaitingQueue(jd);
+            } else {
+                saveJobInstance(jd);
+            }
         } catch (Exception e) {
             LOGGER.error("Post spark task ERROR.", e);
         }
     }
 
-    private void updateJobInstanceState(JobExecutionContext context) throws
-            IOException {
+    private void updateJobInstanceState(JobExecutionContext context)
+        throws IOException {
         SimpleTrigger simpleTrigger = (SimpleTrigger) context.getTrigger();
         int repeatCount = simpleTrigger.getRepeatCount();
         int fireCount = simpleTrigger.getTimesTriggered();
@@ -115,33 +128,17 @@
     }
 
     private String post2Livy() {
-        String result = null;
-        try {
-            HttpHeaders headers = new HttpHeaders();
-            headers.setContentType(MediaType.APPLICATION_JSON);
-            headers.set(REQUEST_BY_HEADER,"admin");
-          
-            HttpEntity<String> springEntity = new HttpEntity<String>(toJsonWithFormat(livyConfMap), headers );
-            result = restTemplate.postForObject(livyUri,springEntity,String.class);
-           
-            LOGGER.info(result);
-        } catch (HttpClientErrorException e) {
-            LOGGER.error("Post to livy ERROR. \n {} {}",
-                    e.getMessage(),
-                    e.getResponseBodyAsString());
-        } catch (Exception e) {
-            LOGGER.error("Post to livy ERROR. {}", e.getMessage());
-        }
-        return result;
+        return livyTaskSubmitHelper.postToLivy(livyUri);
     }
 
     private boolean success(List<SegmentPredicate> predicates) {
         if (CollectionUtils.isEmpty(predicates)) {
             return true;
         }
+
         for (SegmentPredicate segPredicate : predicates) {
             Predicator predicator = PredicatorFactory
-                    .newPredicateInstance(segPredicate);
+                .newPredicateInstance(segPredicate);
             try {
                 if (predicator != null && !predicator.predicate()) {
                     return false;
@@ -149,7 +146,6 @@
             } catch (Exception e) {
                 return false;
             }
-
         }
         return true;
     }
@@ -157,9 +153,9 @@
     private void initParam(JobDetail jd) throws IOException {
         mPredicates = new ArrayList<>();
         jobInstance = jobInstanceRepo.findByPredicateName(jd.getJobDataMap()
-                .getString(PREDICATE_JOB_NAME));
+            .getString(PREDICATE_JOB_NAME));
         measure = toEntity(jd.getJobDataMap().getString(MEASURE_KEY),
-                GriffinMeasure.class);
+            GriffinMeasure.class);
         livyUri = env.getProperty("livy.uri");
         setPredicates(jd.getJobDataMap().getString(PREDICATES_KEY));
         // in order to keep metric name unique, we set job name
@@ -172,14 +168,11 @@
         if (StringUtils.isEmpty(json)) {
             return;
         }
-        List<Map<String, Object>> maps = toEntity(json,
-                new TypeReference<List<Map>>() {
-                });
-        for (Map<String, Object> map : maps) {
-            SegmentPredicate sp = new SegmentPredicate();
-            sp.setType((String) map.get("type"));
-            sp.setConfigMap((Map<String, Object>) map.get("config"));
-            mPredicates.add(sp);
+        List<SegmentPredicate> predicates = toEntity(json,
+            new TypeReference<List<SegmentPredicate>>() {
+            });
+        if (predicates != null) {
+            mPredicates.addAll(predicates);
         }
     }
 
@@ -205,7 +198,7 @@
         List<String> args = new ArrayList<>();
         args.add(genEnv());
         String measureJson = JsonUtil.toJsonWithFormat(measure);
-        // to fix livy bug: character ` will be ignored by livy
+        // to fix livy bug: the backtick character (`) will be ignored by livy
         String finalMeasureJson = escapeCharacter(measureJson, "\\`");
         LOGGER.info(finalMeasureJson);
         args.add(finalMeasureJson);
@@ -213,24 +206,39 @@
         livyConfMap.put("args", args);
     }
 
-
-    private void saveJobInstance(JobDetail jd) throws SchedulerException,
-            IOException {
+    protected void saveJobInstance(JobDetail jd) throws SchedulerException,
+        IOException {
         // If result is null, the livy uri may be wrong
         // or the livy parameters may be wrong.
-        String result = post2Livy();
+        Map<String, Object> resultMap = post2LivyWithRetry();
         String group = jd.getKey().getGroup();
         String name = jd.getKey().getName();
         batchJobOp.deleteJob(group, name);
         LOGGER.info("Delete predicate job({},{}) SUCCESS.", group, name);
-        saveJobInstance(result, FOUND);
+        setJobInstance(resultMap, FOUND);
+        jobInstanceRepo.save(jobInstance);
     }
 
-    private void saveJobInstance(String result, State state)
-            throws IOException {
+    private Map<String, Object> post2LivyWithRetry()
+        throws IOException {
+        String result = post2Livy();
+        Map<String, Object> resultMap = null;
+        if (result != null) {
+            resultMap = livyTaskSubmitHelper.retryLivyGetAppId(result, appIdRetryCount);
+            if (resultMap != null) {
+                livyTaskSubmitHelper.increaseCurTaskNum(Long.valueOf(
+                    String.valueOf(resultMap.get("id"))).longValue());
+            }
+        }
+
+        return resultMap;
+    }
+
+    protected void saveJobInstance(String result, State state)
+        throws IOException {
         TypeReference<HashMap<String, Object>> type =
-                new TypeReference<HashMap<String, Object>>() {
-                };
+            new TypeReference<HashMap<String, Object>>() {
+            };
         Map<String, Object> resultMap = null;
         if (result != null) {
             resultMap = toEntity(result, type);
@@ -247,9 +255,9 @@
             Object id = resultMap.get("id");
             Object appId = resultMap.get("appId");
             jobInstance.setState(status == null ? null : State.valueOf(status
-                    .toString().toUpperCase()));
+                .toString().toUpperCase()));
             jobInstance.setSessionId(id == null ? null : Long.parseLong(id
-                    .toString()));
+                .toString()));
             jobInstance.setAppId(appId == null ? null : appId.toString());
         }
     }
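
saveJobInstance above extracts state, id and appId from the Livy response. A standalone sketch of that parsing step using Jackson; the sample JSON only mimics the shape of a typical Livy POST /batches reply and is illustrative, not captured from a real cluster:

import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public class LivyResponseSketch {
    public static void main(String[] args) throws Exception {
        String result = "{\"id\":7,\"state\":\"starting\",\"appId\":\"application_1554348899459_0007\"}";

        Map<String, Object> resultMap = new ObjectMapper()
            .readValue(result, new TypeReference<HashMap<String, Object>>() {});

        // The same three fields SparkSubmitJob copies onto the JobInstanceBean.
        Object id = resultMap.get("id");
        Object state = resultMap.get("state");
        Object appId = resultMap.get("appId");
        System.out.printf("sessionId=%s state=%s appId=%s%n",
            Long.parseLong(id.toString()), state.toString().toUpperCase(), appId);
    }
}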
diff --git a/service/src/main/java/org/apache/griffin/core/job/StreamingJobOperatorImpl.java b/service/src/main/java/org/apache/griffin/core/job/StreamingJobOperatorImpl.java
index 36b23ce..e3fab06 100644
--- a/service/src/main/java/org/apache/griffin/core/job/StreamingJobOperatorImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/job/StreamingJobOperatorImpl.java
@@ -56,12 +56,11 @@
 import org.springframework.util.CollectionUtils;
 import org.springframework.web.client.ResourceAccessException;
 import org.springframework.web.client.RestClientException;
-import org.springframework.web.client.RestTemplate;
 
 @Service
 public class StreamingJobOperatorImpl implements JobOperator {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(StreamingJobOperatorImpl.class);
+        .getLogger(StreamingJobOperatorImpl.class);
     @Autowired
     private StreamingJobRepo streamingJobRepo;
     @Autowired
@@ -72,20 +71,20 @@
     private JobInstanceRepo instanceRepo;
     @Autowired
     private SchedulerFactoryBean factory;
+    @Autowired
+    private LivyTaskSubmitHelper livyTaskSubmitHelper;
 
     private String livyUri;
-    private RestTemplate restTemplate;
 
     @PostConstruct
     public void init() {
-        restTemplate = new RestTemplate();
         livyUri = env.getProperty("livy.uri");
     }
 
     @Override
     @Transactional(rollbackFor = Exception.class)
     public AbstractJob add(AbstractJob job, GriffinMeasure measure) throws
-            Exception {
+        Exception {
         validateParams(job);
         String qName = jobService.getQuartzName(job);
         String qGroup = jobService.getQuartzGroup();
@@ -127,24 +126,24 @@
         /* Firstly you should check whether job is scheduled.
         If it is scheduled, triggers are empty. */
         List<? extends Trigger> triggers = jobService.getTriggers(
-                job.getName(),
-                job.getGroup());
+            job.getName(),
+            job.getGroup());
         if (!CollectionUtils.isEmpty(triggers)) {
             throw new GriffinException.BadRequestException
-                    (STREAMING_JOB_IS_RUNNING);
+                (STREAMING_JOB_IS_RUNNING);
         }
         /* Secondly you should check whether job instance is running. */
         List<JobInstanceBean> instances = instanceRepo.findByJobId(job.getId());
         instances.stream().filter(instance -> !instance.isDeleted()).forEach
-                (instance -> {
-                    State state = instance.getState();
-                    String quartzState = convert2QuartzState(state);
-                    if (!getStartStatus(quartzState)) {
-                        throw new GriffinException.BadRequestException
-                                (STREAMING_JOB_IS_RUNNING);
-                    }
-                    instance.setDeleted(true);
-                });
+            (instance -> {
+                State state = instance.getState();
+                String quartzState = convert2QuartzState(state);
+                if (!getStartStatus(quartzState)) {
+                    throw new GriffinException.BadRequestException
+                        (STREAMING_JOB_IS_RUNNING);
+                }
+                instance.setDeleted(true);
+            });
     }
 
 
@@ -175,7 +174,7 @@
     public JobState getState(AbstractJob job, String action) {
         JobState jobState = new JobState();
         List<JobInstanceBean> instances = instanceRepo
-                .findByJobId(job.getId());
+            .findByJobId(job.getId());
         for (JobInstanceBean instance : instances) {
             State state = instance.getState();
             if (!instance.isDeleted() && state != null) {
@@ -209,7 +208,7 @@
      * started
      */
     private boolean getStartStatus(String state) {
-        return !"NORMAL" .equals(state) && !"BLOCKED" .equals(state);
+        return !"NORMAL".equals(state) && !"BLOCKED".equals(state);
     }
 
     /**
@@ -220,30 +219,32 @@
      * stopped
      */
     private boolean getStopStatus(String state) {
-        return !"COMPLETE" .equals(state) && !"ERROR" .equals(state);
+        return !"COMPLETE".equals(state) && !"ERROR".equals(state);
     }
 
     private void deleteByLivy(JobInstanceBean instance) {
         Long sessionId = instance.getSessionId();
         if (sessionId == null) {
             LOGGER.warn("Session id of instance({},{}) is null.", instance
-                    .getPredicateGroup(), instance.getPredicateName
-                    ());
+                .getPredicateGroup(), instance.getPredicateName
+                ());
             return;
         }
         String url = livyUri + "/" + instance.getSessionId();
         try {
-            restTemplate.delete(url);
+            // Use livy helper to interact with livy
+            livyTaskSubmitHelper.deleteByLivy(url);
+
             LOGGER.info("Job instance({}) has been deleted. {}", instance
-                    .getSessionId(), url);
+                .getSessionId(), url);
         } catch (ResourceAccessException e) {
             LOGGER.error("Your url may be wrong. Please check {}.\n {}",
-                    livyUri, e.getMessage());
+                livyUri, e.getMessage());
         } catch (RestClientException e) {
             LOGGER.warn("sessionId({}) appId({}) {}.", instance.getSessionId(),
-                    instance.getAppId(), e.getMessage());
+                instance.getAppId(), e.getMessage());
             YarnNetUtil.delete(env.getProperty("yarn.uri"),
-                    instance.getAppId());
+                instance.getAppId());
         }
     }
 
@@ -254,23 +255,23 @@
      *               job
      */
     private void stop(StreamingJob job, boolean delete) throws
-            SchedulerException {
+        SchedulerException {
         pauseJob(job);
         /* to prevent situation that streaming job is submitted
         before pause or when pausing. */
         List<JobInstanceBean> instances = instanceRepo
-                .findByJobId(job.getId());
+            .findByJobId(job.getId());
         instances.stream().filter(instance -> !instance.isDeleted())
-                .forEach(instance -> {
-                    State state = instance.getState();
-                    String quartzState = convert2QuartzState(state);
-                    if (getStopStatus(quartzState)) {
-                        deleteByLivy(instance);
+            .forEach(instance -> {
+                State state = instance.getState();
+                String quartzState = convert2QuartzState(state);
+                if (getStopStatus(quartzState)) {
+                    deleteByLivy(instance);
 
-                    }
-                    instance.setState(STOPPED);
-                    instance.setDeleted(true);
-                });
+                }
+                instance.setState(STOPPED);
+                instance.setDeleted(true);
+            });
         job.setDeleted(delete);
         streamingJobRepo.save(job);
     }
diff --git a/service/src/main/java/org/apache/griffin/core/job/entity/AbstractJob.java b/service/src/main/java/org/apache/griffin/core/job/entity/AbstractJob.java
index 660eb27..1c48adf 100644
--- a/service/src/main/java/org/apache/griffin/core/job/entity/AbstractJob.java
+++ b/service/src/main/java/org/apache/griffin/core/job/entity/AbstractJob.java
@@ -57,20 +57,20 @@
 @Table(name = "job")
 @Inheritance(strategy = InheritanceType.SINGLE_TABLE)
 @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY,
-        property = "job.type")
+    property = "job.type")
 @JsonSubTypes({@JsonSubTypes.Type(value = BatchJob.class, name = "batch"),
-        @JsonSubTypes.Type(
-                value = StreamingJob.class,
-                name = "streaming"),
-        @JsonSubTypes.Type(
-                value = VirtualJob.class,
-                name = "virtual")})
+    @JsonSubTypes.Type(
+        value = StreamingJob.class,
+        name = "streaming"),
+    @JsonSubTypes.Type(
+        value = VirtualJob.class,
+        name = "virtual")})
 @DiscriminatorColumn(name = "type")
 public abstract class AbstractJob extends AbstractAuditableEntity {
     private static final long serialVersionUID = 7569493377868453677L;
 
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(AbstractJob.class);
+        .getLogger(AbstractJob.class);
 
     protected Long measureId;
 
@@ -107,7 +107,7 @@
 
     @NotNull
     @OneToMany(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST,
-            CascadeType.REMOVE, CascadeType.MERGE})
+        CascadeType.REMOVE, CascadeType.MERGE})
     @JoinColumn(name = "job_id")
     private List<JobDataSegment> segments = new ArrayList<>();
 
@@ -236,8 +236,8 @@
     public void load() throws IOException {
         if (!StringUtils.isEmpty(predicateConfig)) {
             this.configMap = JsonUtil.toEntity(predicateConfig,
-                    new TypeReference<Map<String, Object>>() {
-                    });
+                new TypeReference<Map<String, Object>>() {
+                });
         }
     }
 
@@ -265,7 +265,6 @@
         this.deleted = deleted;
     }
 
-
     AbstractJob(String jobName, Long measureId, String metricName) {
         this.jobName = jobName;
         this.measureId = measureId;
diff --git a/service/src/main/java/org/apache/griffin/core/job/entity/JobDataSegment.java b/service/src/main/java/org/apache/griffin/core/job/entity/JobDataSegment.java
index 2cc394f..2272e14 100644
--- a/service/src/main/java/org/apache/griffin/core/job/entity/JobDataSegment.java
+++ b/service/src/main/java/org/apache/griffin/core/job/entity/JobDataSegment.java
@@ -39,7 +39,7 @@
     private static final long serialVersionUID = -9056531122243340484L;
 
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(JobDataSegment.class);
+        .getLogger(JobDataSegment.class);
 
     @NotNull
     private String dataConnectorName;
@@ -47,7 +47,7 @@
     private boolean asTsBaseline = false;
 
     @OneToOne(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST,
-            CascadeType.REMOVE, CascadeType.MERGE})
+        CascadeType.REMOVE, CascadeType.MERGE})
     @JoinColumn(name = "segment_range_id")
     private SegmentRange segmentRange = new SegmentRange();
 
@@ -77,7 +77,7 @@
     public void setDataConnectorName(String dataConnectorName) {
         if (StringUtils.isEmpty(dataConnectorName)) {
             LOGGER.warn(" Data connector name is invalid. " +
-                    "Please check your connector name.");
+                "Please check your connector name.");
             throw new NullPointerException();
         }
         this.dataConnectorName = dataConnectorName;
diff --git a/service/src/main/java/org/apache/griffin/core/job/entity/JobInstanceBean.java b/service/src/main/java/org/apache/griffin/core/job/entity/JobInstanceBean.java
index 6fc5cf2..23b70a4 100644
--- a/service/src/main/java/org/apache/griffin/core/job/entity/JobInstanceBean.java
+++ b/service/src/main/java/org/apache/griffin/core/job/entity/JobInstanceBean.java
@@ -29,14 +29,17 @@
 import javax.persistence.Entity;
 import javax.persistence.EnumType;
 import javax.persistence.Enumerated;
+import javax.persistence.Index;
 import javax.persistence.JoinColumn;
 import javax.persistence.ManyToOne;
+import javax.persistence.Table;
 
 import org.apache.griffin.core.job.entity.LivySessionStates.State;
 import org.apache.griffin.core.measure.entity.AbstractAuditableEntity;
 import org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType;
 
 @Entity
+@Table(indexes = {@Index(columnList = "triggerKey")})
 public class JobInstanceBean extends AbstractAuditableEntity {
 
     private static final long serialVersionUID = -4748881017029815874L;
@@ -85,6 +88,8 @@
     @JsonIgnore
     private AbstractJob job;
 
+    private String triggerKey;
+
     public AbstractJob getJob() {
         return job;
     }
@@ -183,9 +188,24 @@
         this.deleted = deleted;
     }
 
+    public String getTriggerKey() {
+        return triggerKey;
+    }
+
+    public void setTriggerKey(String triggerKey) {
+        this.triggerKey = triggerKey;
+    }
+
     public JobInstanceBean() {
     }
 
+    public JobInstanceBean(State state, Long tms, Long expireTms, String appId) {
+        this.state = state;
+        this.tms = tms;
+        this.expireTms = expireTms;
+        this.appId = appId;
+    }
+
     public JobInstanceBean(State state, Long tms, Long expireTms) {
         this.state = state;
         this.tms = tms;
diff --git a/service/src/main/java/org/apache/griffin/core/job/entity/LivySessionStates.java b/service/src/main/java/org/apache/griffin/core/job/entity/LivySessionStates.java
index de122f5..2e892f6 100644
--- a/service/src/main/java/org/apache/griffin/core/job/entity/LivySessionStates.java
+++ b/service/src/main/java/org/apache/griffin/core/job/entity/LivySessionStates.java
@@ -127,7 +127,7 @@
 
     public static boolean isActive(State state) {
         if (UNKNOWN.equals(state) || STOPPED.equals(state) || NOT_FOUND.equals
-                (state) || FOUND.equals(state)) {
+            (state) || FOUND.equals(state)) {
             // set UNKNOWN isActive() as false.
             return false;
         } else if (FINDING.equals(state)) {
@@ -143,8 +143,8 @@
             return "COMPLETE";
         }
         if (UNKNOWN.equals(state) || NOT_FOUND.equals(state)
-                || FOUND.equals(state) || sessionState == null
-                || !sessionState.isActive()) {
+            || FOUND.equals(state) || sessionState == null
+            || !sessionState.isActive()) {
             return "ERROR";
         }
         return "NORMAL";
@@ -153,9 +153,9 @@
 
     public static boolean isHealthy(State state) {
         return !(State.ERROR.equals(state) || State.DEAD.equals(state)
-                || State.SHUTTING_DOWN.equals(state)
-                || State.FINDING.equals(state)
-                || State.NOT_FOUND.equals(state)
-                || State.FOUND.equals(state));
+            || State.SHUTTING_DOWN.equals(state)
+            || State.FINDING.equals(state)
+            || State.NOT_FOUND.equals(state)
+            || State.FOUND.equals(state));
     }
 }
diff --git a/service/src/main/java/org/apache/griffin/core/job/entity/SegmentPredicate.java b/service/src/main/java/org/apache/griffin/core/job/entity/SegmentPredicate.java
index 138ab0f..843ad43 100644
--- a/service/src/main/java/org/apache/griffin/core/job/entity/SegmentPredicate.java
+++ b/service/src/main/java/org/apache/griffin/core/job/entity/SegmentPredicate.java
@@ -17,7 +17,6 @@
 under the License.
 */
 
-
 package org.apache.griffin.core.job.entity;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
@@ -87,16 +86,16 @@
     public void load() throws IOException {
         if (!StringUtils.isEmpty(config)) {
             this.configMap = JsonUtil.toEntity(config,
-                    new TypeReference<Map<String, Object>>() {
-                    });
+                new TypeReference<Map<String, Object>>() {
+                });
         }
     }
 
     public SegmentPredicate() {
     }
 
-    public SegmentPredicate(String type, Map<String, String> configMap) throws
-            JsonProcessingException {
+    public SegmentPredicate(String type, Map<String, String> configMap)
+        throws JsonProcessingException {
         this.type = type;
         this.config = JsonUtil.toJson(configMap);
     }
diff --git a/service/src/main/java/org/apache/griffin/core/job/entity/SegmentRange.java b/service/src/main/java/org/apache/griffin/core/job/entity/SegmentRange.java
index f9fcc72..a486f8c 100644
--- a/service/src/main/java/org/apache/griffin/core/job/entity/SegmentRange.java
+++ b/service/src/main/java/org/apache/griffin/core/job/entity/SegmentRange.java
@@ -17,7 +17,6 @@
 under the License.
 */
 
-
 package org.apache.griffin.core.job.entity;
 
 import javax.persistence.Column;
@@ -35,7 +34,6 @@
 
     private String length = "1h";
 
-
     public String getBegin() {
         return begin;
     }
diff --git a/service/src/main/java/org/apache/griffin/core/job/factory/AutowiringSpringBeanJobFactory.java b/service/src/main/java/org/apache/griffin/core/job/factory/AutowiringSpringBeanJobFactory.java
index 8bb3d60..78d1d78 100644
--- a/service/src/main/java/org/apache/griffin/core/job/factory/AutowiringSpringBeanJobFactory.java
+++ b/service/src/main/java/org/apache/griffin/core/job/factory/AutowiringSpringBeanJobFactory.java
@@ -27,10 +27,18 @@
 import org.springframework.context.ApplicationContextAware;
 import org.springframework.scheduling.quartz.SpringBeanJobFactory;
 
+/**
+ * AutowiringSpringBeanJobFactory is a specialization of {@link SpringBeanJobFactory} that adds
+ * auto-wiring support, allowing you to inject properties from the scheduler context, job data map
+ * and trigger data entries into the job bean.
+ *
+ * @see SpringBeanJobFactory
+ * @see ApplicationContextAware
+ */
 public final class AutowiringSpringBeanJobFactory extends SpringBeanJobFactory
-        implements ApplicationContextAware {
+    implements ApplicationContextAware {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(AutowiringSpringBeanJobFactory.class);
+        .getLogger(AutowiringSpringBeanJobFactory.class);
 
     private transient AutowireCapableBeanFactory beanFactory;
 
@@ -41,12 +49,10 @@
 
     @Override
     protected Object createJobInstance(final TriggerFiredBundle bundle) {
-
         try {
             final Object job = super.createJobInstance(bundle);
             beanFactory.autowireBean(job);
             return job;
-
         } catch (Exception e) {
             LOGGER.error("fail to create job instance. {}", e);
         }
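
The factory only takes effect once Quartz is told to use it. A hedged sketch of typical wiring through SchedulerFactoryBean; the configuration class and bean names are illustrative and not taken from Griffin's actual scheduler config:

import org.apache.griffin.core.job.factory.AutowiringSpringBeanJobFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.scheduling.quartz.SpringBeanJobFactory;

@Configuration
public class QuartzConfigSketch {

    @Bean
    public SpringBeanJobFactory jobFactory(ApplicationContext applicationContext) {
        // Give the factory the context it needs to autowire job beans such as SparkSubmitJob.
        AutowiringSpringBeanJobFactory jobFactory = new AutowiringSpringBeanJobFactory();
        jobFactory.setApplicationContext(applicationContext);
        return jobFactory;
    }

    @Bean
    public SchedulerFactoryBean schedulerFactoryBean(SpringBeanJobFactory jobFactory) {
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        // Every job instance Quartz creates now passes through the autowiring factory.
        factory.setJobFactory(jobFactory);
        return factory;
    }
}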
diff --git a/service/src/main/java/org/apache/griffin/core/job/factory/PredicatorFactory.java b/service/src/main/java/org/apache/griffin/core/job/factory/PredicatorFactory.java
index d65e49d..7f16ef7 100644
--- a/service/src/main/java/org/apache/griffin/core/job/factory/PredicatorFactory.java
+++ b/service/src/main/java/org/apache/griffin/core/job/factory/PredicatorFactory.java
@@ -19,6 +19,12 @@
 
 package org.apache.griffin.core.job.factory;
 
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.PREDICATE_TYPE_NOT_FOUND;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+
+import org.apache.griffin.core.exception.GriffinException;
 import org.apache.griffin.core.job.FileExistPredicator;
 import org.apache.griffin.core.job.Predicator;
 import org.apache.griffin.core.job.entity.SegmentPredicate;
@@ -27,17 +33,43 @@
 
 public class PredicatorFactory {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(PredicatorFactory.class);
+        .getLogger(PredicatorFactory.class);
 
     public static Predicator newPredicateInstance(SegmentPredicate segPredicate) {
-        Predicator predicate = null;
+        Predicator predicate;
         switch (segPredicate.getType()) {
             case "file.exist":
                 predicate = new FileExistPredicator(segPredicate);
                 break;
-            default:
-                LOGGER.warn("There is no predicate type that you input.");
+            case "custom":
+                predicate = getPredicateBean(segPredicate);
                 break;
+            default:
+                throw new GriffinException.NotFoundException(PREDICATE_TYPE_NOT_FOUND);
+        }
+        return predicate;
+    }
+
+    private static Predicator getPredicateBean(SegmentPredicate segmentPredicate) {
+        Predicator predicate;
+        String predicateClassName = (String) segmentPredicate.getConfigMap().get("class");
+        try {
+            Class clazz = Class.forName(predicateClassName);
+            Constructor<Predicator> constructor = clazz.getConstructor(SegmentPredicate.class);
+            predicate = constructor.newInstance(segmentPredicate);
+        } catch (ClassNotFoundException e) {
+            String message = "There is no predicate type that you input.";
+            LOGGER.error(message, e);
+            throw new GriffinException.ServiceException(message, e);
+        } catch (NoSuchMethodException e) {
+            String message = "For predicate with type " + predicateClassName +
+                " constructor with parameter of type " + SegmentPredicate.class.getName() + " not found";
+            LOGGER.error(message, e);
+            throw new GriffinException.ServiceException(message, e);
+        } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
+            String message = "Error creating predicate bean";
+            LOGGER.error(message, e);
+            throw new GriffinException.ServiceException(message, e);
         }
         return predicate;
     }
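
The "custom" branch above loads the implementation class named in the predicate's config map and invokes its SegmentPredicate constructor. A small sketch of driving that path; the fully qualified class name refers to the hypothetical AfterHourPredicator sketched earlier and is an assumption, not a class shipped with Griffin:

import java.util.HashMap;
import java.util.Map;

import org.apache.griffin.core.job.Predicator;
import org.apache.griffin.core.job.entity.SegmentPredicate;
import org.apache.griffin.core.job.factory.PredicatorFactory;

public class CustomPredicateSketch {
    public static void main(String[] args) throws Exception {
        Map<String, String> config = new HashMap<>();
        // "class" must name a Predicator implementation with a SegmentPredicate constructor.
        config.put("class", "org.example.AfterHourPredicator");
        config.put("hour", "6");

        SegmentPredicate segmentPredicate = new SegmentPredicate("custom", config);
        segmentPredicate.load(); // populates the config map from the stored JSON

        Predicator predicator = PredicatorFactory.newPredicateInstance(segmentPredicate);
        System.out.println("condition met: " + predicator.predicate());
    }
}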
diff --git a/service/src/main/java/org/apache/griffin/core/job/repo/JobInstanceRepo.java b/service/src/main/java/org/apache/griffin/core/job/repo/JobInstanceRepo.java
index aa932e9..4db291d 100644
--- a/service/src/main/java/org/apache/griffin/core/job/repo/JobInstanceRepo.java
+++ b/service/src/main/java/org/apache/griffin/core/job/repo/JobInstanceRepo.java
@@ -30,10 +30,13 @@
 import org.springframework.transaction.annotation.Transactional;
 
 public interface JobInstanceRepo
-        extends CrudRepository<JobInstanceBean, Long> {
+    extends CrudRepository<JobInstanceBean, Long> {
 
     JobInstanceBean findByPredicateName(String name);
 
+    @Query("select s from JobInstanceBean s where s.id = ?1")
+    JobInstanceBean findByInstanceId(Long id);
+
     @Query("select s from JobInstanceBean s where s.job.id = ?1")
     List<JobInstanceBean> findByJobId(Long jobId, Pageable pageable);
 
@@ -45,9 +48,11 @@
     @Transactional(rollbackFor = Exception.class)
     @Modifying
     @Query("delete from JobInstanceBean j " +
-            "where j.expireTms <= ?1 and j.deleted = false ")
+        "where j.expireTms <= ?1 and j.deleted = false ")
     int deleteByExpireTimestamp(Long expireTms);
 
     @Query("select DISTINCT s from JobInstanceBean s where s.state in ?1")
     List<JobInstanceBean> findByActiveState(State[] states);
+
+    List<JobInstanceBean> findByTriggerKey(String triggerKey);
 }
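
findByTriggerKey is a Spring Data derived query resolved from the triggerKey column added to JobInstanceBean in this patch. A brief usage sketch; the wrapping component is illustrative and not part of Griffin:

import java.util.List;

import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class TriggerInstanceLookup {

    @Autowired
    private JobInstanceRepo jobInstanceRepo;

    // Derived query: Spring Data generates "where triggerKey = ?1" from the method name.
    public List<JobInstanceBean> instancesForTrigger(String triggerKey) {
        return jobInstanceRepo.findByTriggerKey(triggerKey);
    }
}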
diff --git a/service/src/main/java/org/apache/griffin/core/job/repo/JobRepo.java b/service/src/main/java/org/apache/griffin/core/job/repo/JobRepo.java
index 75f8048..a9bfbe2 100644
--- a/service/src/main/java/org/apache/griffin/core/job/repo/JobRepo.java
+++ b/service/src/main/java/org/apache/griffin/core/job/repo/JobRepo.java
@@ -28,7 +28,7 @@
 public interface JobRepo<T extends AbstractJob> extends CrudRepository<T, Long> {
 
     @Query("select count(j) from #{#entityName} j " +
-            "where j.jobName = ?1 and j.deleted = ?2")
+        "where j.jobName = ?1 and j.deleted = ?2")
     int countByJobNameAndDeleted(String jobName, Boolean deleted);
 
     List<T> findByDeleted(boolean deleted);
diff --git a/service/src/main/java/org/apache/griffin/core/login/LoginController.java b/service/src/main/java/org/apache/griffin/core/login/LoginController.java
index 3d08551..51b4cae 100644
--- a/service/src/main/java/org/apache/griffin/core/login/LoginController.java
+++ b/service/src/main/java/org/apache/griffin/core/login/LoginController.java
@@ -37,7 +37,7 @@
 
     @RequestMapping(value = "/authenticate", method = RequestMethod.POST)
     public ResponseEntity<Map<String, Object>> login(
-            @RequestBody Map<String, String> map) {
+        @RequestBody Map<String, String> map) {
         return loginService.login(map);
     }
 }
diff --git a/service/src/main/java/org/apache/griffin/core/login/LoginService.java b/service/src/main/java/org/apache/griffin/core/login/LoginService.java
index 11c3a3d..3c78bb1 100644
--- a/service/src/main/java/org/apache/griffin/core/login/LoginService.java
+++ b/service/src/main/java/org/apache/griffin/core/login/LoginService.java
@@ -23,6 +23,14 @@
 
 import org.springframework.http.ResponseEntity;
 
+/**
+ * LoginService defines an abstract validation method for the login action; implement
+ * it to customize the authentication logic.
+ *
+ * @see org.apache.griffin.core.config.LoginConfig
+ * @see LoginServiceDefaultImpl
+ * @see LoginServiceLdapImpl
+ */
 public interface LoginService {
 
     ResponseEntity<Map<String, Object>> login(Map<String, String> map);
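
Because LoginService is the pluggable point, a minimal hedged sketch of a custom implementation; the hard-coded credential check is purely illustrative, and the response fields mirror those used by LoginServiceDefaultImpl in the next hunk:

import java.util.HashMap;
import java.util.Map;

import org.apache.griffin.core.login.LoginService;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;

public class LoginServiceTokenImpl implements LoginService {

    @Override
    public ResponseEntity<Map<String, Object>> login(Map<String, String> map) {
        String username = map.get("username");
        String password = map.get("password");

        // Illustrative check only; plug in your own credential verification here.
        if (username == null || !"let-me-in".equals(password)) {
            return new ResponseEntity<>(HttpStatus.FORBIDDEN);
        }

        Map<String, Object> message = new HashMap<>();
        message.put("ntAccount", username);
        message.put("fullName", username);
        message.put("status", 0);
        return new ResponseEntity<>(message, HttpStatus.OK);
    }
}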
diff --git a/service/src/main/java/org/apache/griffin/core/login/LoginServiceDefaultImpl.java b/service/src/main/java/org/apache/griffin/core/login/LoginServiceDefaultImpl.java
index fc54ea4..fc4041c 100644
--- a/service/src/main/java/org/apache/griffin/core/login/LoginServiceDefaultImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/login/LoginServiceDefaultImpl.java
@@ -34,10 +34,9 @@
         if (StringUtils.isBlank(username)) {
             username = "Anonymous";
         }
-        String fullName = username;
         Map<String, Object> message = new HashMap<>();
         message.put("ntAccount", username);
-        message.put("fullName", fullName);
+        message.put("fullName", username);
         message.put("status", 0);
         return new ResponseEntity<>(message, HttpStatus.OK);
     }
diff --git a/service/src/main/java/org/apache/griffin/core/login/LoginServiceLdapImpl.java b/service/src/main/java/org/apache/griffin/core/login/LoginServiceLdapImpl.java
index 56f6765..1389d12 100644
--- a/service/src/main/java/org/apache/griffin/core/login/LoginServiceLdapImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/login/LoginServiceLdapImpl.java
@@ -43,10 +43,10 @@
 
 public class LoginServiceLdapImpl implements LoginService {
     private static final Logger LOGGER = LoggerFactory.getLogger
-            (LoginServiceLdapImpl.class);
+        (LoginServiceLdapImpl.class);
 
     private static final String LDAP_FACTORY =
-            "com.sun.jndi.ldap.LdapCtxFactory";
+        "com.sun.jndi.ldap.LdapCtxFactory";
 
     private String url;
     private String email;
@@ -86,7 +86,7 @@
             ctx = getContextInstance(toPrincipal(bindAccount), bindPassword);
 
             NamingEnumeration<SearchResult> results = ctx.search(searchBase,
-                    searchFilter, searchControls);
+                searchFilter, searchControls);
             SearchResult userObject = getSingleUser(results);
 
             // verify password if different bind user is used
@@ -102,7 +102,7 @@
             return new ResponseEntity<>(message, HttpStatus.OK);
         } catch (AuthenticationException e) {
             LOGGER.warn("User {} failed to login with LDAP auth. {}", username,
-                    e.getMessage());
+                e.getMessage());
         } catch (NamingException e) {
             LOGGER.warn(String.format("User %s failed to login with LDAP auth.", username), e);
         } finally {
@@ -129,7 +129,7 @@
         if (results.hasMoreElements()) {
             SearchResult second = results.nextElement();
             throw new NamingException(String.format("Ambiguous search, found two users: %s, %s",
-                    result.getNameInNamespace(), second.getNameInNamespace()));
+                result.getNameInNamespace(), second.getNameInNamespace()));
         }
         return result;
     }
@@ -161,7 +161,7 @@
             }
         } catch (NamingException e) {
             LOGGER.warn("User {} successfully login with LDAP auth, " +
-                    "but failed to get full name.", ntAccount);
+                "but failed to get full name.", ntAccount);
             return ntAccount;
         }
     }
@@ -175,7 +175,7 @@
     }
 
     private LdapContext getContextInstance(String principal, String password)
-            throws NamingException {
+        throws NamingException {
         Hashtable<String, String> ht = new Hashtable<>();
         ht.put(Context.INITIAL_CONTEXT_FACTORY, LDAP_FACTORY);
         ht.put(Context.PROVIDER_URL, url);
diff --git a/service/src/main/java/org/apache/griffin/core/login/ldap/SelfSignedSocketFactory.java b/service/src/main/java/org/apache/griffin/core/login/ldap/SelfSignedSocketFactory.java
index 2daa2c3..028d878 100644
--- a/service/src/main/java/org/apache/griffin/core/login/ldap/SelfSignedSocketFactory.java
+++ b/service/src/main/java/org/apache/griffin/core/login/ldap/SelfSignedSocketFactory.java
@@ -19,23 +19,23 @@
 
 package org.apache.griffin.core.login.ldap;
 
-import org.apache.griffin.core.exception.GriffinException;
-
-import javax.net.SocketFactory;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLSocketFactory;
-import javax.net.ssl.TrustManager;
-import javax.net.ssl.X509TrustManager;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.Socket;
 import java.net.UnknownHostException;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
+import javax.net.SocketFactory;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLSocketFactory;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.X509TrustManager;
+
+import org.apache.griffin.core.exception.GriffinException;
 
 /**
  * SocketFactory ignoring insecure (self-signed, expired) certificates.
- *
+ * <p>
  * Maintains internal {@code SSLSocketFactory} configured with {@code NoopTrustManager}.
  * All SocketFactory methods are proxied to internal SSLSocketFactory instance.
  * Accepts all client and server certificates, from any issuers.
diff --git a/service/src/main/java/org/apache/griffin/core/measure/ExternalMeasureOperatorImpl.java b/service/src/main/java/org/apache/griffin/core/measure/ExternalMeasureOperatorImpl.java
index c941f28..dda373a 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/ExternalMeasureOperatorImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/ExternalMeasureOperatorImpl.java
@@ -55,9 +55,9 @@
         ExternalMeasure latestMeasure = (ExternalMeasure) measure;
         validateMeasure(latestMeasure);
         ExternalMeasure originMeasure = measureRepo.findOne(
-                latestMeasure.getId());
+            latestMeasure.getId());
         VirtualJob vj = genVirtualJob(latestMeasure,
-                originMeasure.getVirtualJob());
+            originMeasure.getVirtualJob());
         latestMeasure.setVirtualJob(vj);
         measure = measureRepo.save(latestMeasure);
         return measure;
diff --git a/service/src/main/java/org/apache/griffin/core/measure/MeasureController.java b/service/src/main/java/org/apache/griffin/core/measure/MeasureController.java
index 3c5e0b0..a6921bf 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/MeasureController.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/MeasureController.java
@@ -42,7 +42,7 @@
 
     @RequestMapping(value = "/measures", method = RequestMethod.GET)
     public List<? extends Measure> getAllAliveMeasures(@RequestParam(value =
-            "type", defaultValue = "") String type) {
+        "type", defaultValue = "") String type) {
         return measureService.getAllAliveMeasures(type);
     }
 
@@ -54,7 +54,7 @@
     @RequestMapping(value = "/measures/{id}", method = RequestMethod.DELETE)
     @ResponseStatus(HttpStatus.NO_CONTENT)
     public void deleteMeasureById(@PathVariable("id") Long id) throws
-            SchedulerException {
+        SchedulerException {
         measureService.deleteMeasureById(id);
     }
 
@@ -71,7 +71,7 @@
     }
 
     @RequestMapping(value = "/measures/owner/{owner}", method =
-            RequestMethod.GET)
+        RequestMethod.GET)
     public List<Measure> getAliveMeasuresByOwner(@PathVariable("owner")
                                                  @Valid String owner) {
         return measureService.getAliveMeasuresByOwner(owner);
diff --git a/service/src/main/java/org/apache/griffin/core/measure/MeasureOrgService.java b/service/src/main/java/org/apache/griffin/core/measure/MeasureOrgService.java
index accf1a5..4801ec2 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/MeasureOrgService.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/MeasureOrgService.java
@@ -32,6 +32,6 @@
 
     Map<String, Map<String, List<Map<String, Object>>>>
     getMeasureWithJobDetailsGroupByOrg(Map<String,
-            List<Map<String, Object>>>
-                                               jobDetailsGroupByMeasure);
+        List<Map<String, Object>>>
+                                           jobDetailsGroupByMeasure);
 }
diff --git a/service/src/main/java/org/apache/griffin/core/measure/MeasureOrgServiceImpl.java b/service/src/main/java/org/apache/griffin/core/measure/MeasureOrgServiceImpl.java
index 64ec06a..13344c9 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/MeasureOrgServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/MeasureOrgServiceImpl.java
@@ -50,7 +50,7 @@
         List<String> orgs = measureRepo.findNameByOrganization(org, false);
         if (CollectionUtils.isEmpty(orgs)) {
             throw new GriffinException.NotFoundException
-                    (ORGANIZATION_NAME_DOES_NOT_EXIST);
+                (ORGANIZATION_NAME_DOES_NOT_EXIST);
         }
         return orgs;
     }
@@ -64,7 +64,7 @@
             orgName = orgName == null ? "null" : orgName;
             String measureName = measure.getName();
             List<String> measureList = orgWithMetricsMap.getOrDefault(orgName,
-                    new ArrayList<>());
+                new ArrayList<>());
             measureList.add(measureName);
             orgWithMetricsMap.put(orgName, measureList);
         }
@@ -74,9 +74,9 @@
     @Override
     public Map<String, Map<String, List<Map<String, Object>>>>
     getMeasureWithJobDetailsGroupByOrg(Map<String,
-            List<Map<String, Object>>> jobDetails) {
+        List<Map<String, Object>>> jobDetails) {
         Map<String, Map<String, List<Map<String, Object>>>> result =
-                new HashMap<>();
+            new HashMap<>();
         List<GriffinMeasure> measures = measureRepo.findByDeleted(false);
         if (measures == null) {
             return null;
@@ -86,9 +86,9 @@
             String measureName = measure.getName();
             String measureId = measure.getId().toString();
             List<Map<String, Object>> jobList = jobDetails
-                    .getOrDefault(measureId, new ArrayList<>());
+                .getOrDefault(measureId, new ArrayList<>());
             Map<String, List<Map<String, Object>>> measureWithJobs = result
-                    .getOrDefault(orgName, new HashMap<>());
+                .getOrDefault(orgName, new HashMap<>());
             measureWithJobs.put(measureName, jobList);
             result.put(orgName, measureWithJobs);
         }
diff --git a/service/src/main/java/org/apache/griffin/core/measure/MeasureServiceImpl.java b/service/src/main/java/org/apache/griffin/core/measure/MeasureServiceImpl.java
index 1ea945e..97e8117 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/MeasureServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/MeasureServiceImpl.java
@@ -45,7 +45,7 @@
 @Service
 public class MeasureServiceImpl implements MeasureService {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(MeasureServiceImpl.class);
+        .getLogger(MeasureServiceImpl.class);
     private static final String GRIFFIN = "griffin";
     private static final String EXTERNAL = "external";
 
@@ -77,7 +77,7 @@
         Measure measure = measureRepo.findByIdAndDeleted(id, false);
         if (measure == null) {
             throw new GriffinException
-                    .NotFoundException(MEASURE_ID_DOES_NOT_EXIST);
+                .NotFoundException(MEASURE_ID_DOES_NOT_EXIST);
         }
         return measure;
     }
@@ -90,12 +90,12 @@
     @Override
     public Measure createMeasure(Measure measure) {
         List<Measure> aliveMeasureList = measureRepo
-                .findByNameAndDeleted(measure.getName(), false);
+            .findByNameAndDeleted(measure.getName(), false);
         if (!CollectionUtils.isEmpty(aliveMeasureList)) {
             LOGGER.warn("Failed to create new measure {}, it already exists.",
-                    measure.getName());
+                measure.getName());
             throw new GriffinException.ConflictException(
-                    MEASURE_NAME_ALREADY_EXIST);
+                MEASURE_NAME_ALREADY_EXIST);
         }
         MeasureOperator op = getOperation(measure);
         return op.create(measure);
@@ -106,12 +106,12 @@
         Measure m = measureRepo.findByIdAndDeleted(measure.getId(), false);
         if (m == null) {
             throw new GriffinException.NotFoundException(
-                    MEASURE_ID_DOES_NOT_EXIST);
+                MEASURE_ID_DOES_NOT_EXIST);
         }
         if (!m.getType().equals(measure.getType())) {
             LOGGER.warn("Can't update measure to different type.");
             throw new GriffinException.BadRequestException(
-                    MEASURE_TYPE_DOES_NOT_MATCH);
+                MEASURE_TYPE_DOES_NOT_MATCH);
         }
         MeasureOperator op = getOperation(measure);
         return op.update(measure);
@@ -122,7 +122,7 @@
         Measure measure = measureRepo.findByIdAndDeleted(measureId, false);
         if (measure == null) {
             throw new GriffinException.NotFoundException(
-                    MEASURE_ID_DOES_NOT_EXIST);
+                MEASURE_ID_DOES_NOT_EXIST);
         }
         MeasureOperator op = getOperation(measure);
         op.delete(measure);
@@ -144,7 +144,7 @@
             return externalOp;
         }
         throw new GriffinException.BadRequestException(
-                MEASURE_TYPE_DOES_NOT_SUPPORT);
+            MEASURE_TYPE_DOES_NOT_SUPPORT);
     }
 
 }
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/AbstractAuditableEntity.java b/service/src/main/java/org/apache/griffin/core/measure/entity/AbstractAuditableEntity.java
index cb5a399..5bcf9fa 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/AbstractAuditableEntity.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/AbstractAuditableEntity.java
@@ -28,6 +28,10 @@
 import javax.persistence.Id;
 import javax.persistence.MappedSuperclass;
 
+/**
+ * AbstractAuditableEntity is the base entity class in Apache Griffin; all
+ * {@link javax.persistence.Entity} classes should extend it.
+ */
 @MappedSuperclass
 public abstract class AbstractAuditableEntity implements Serializable {
 
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/DataConnector.java b/service/src/main/java/org/apache/griffin/core/measure/entity/DataConnector.java
index 70a6b03..87ddf06 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/DataConnector.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/DataConnector.java
@@ -30,6 +30,7 @@
 import java.util.List;
 import java.util.Map;
 import javax.persistence.CascadeType;
+import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.EnumType;
 import javax.persistence.Enumerated;
@@ -88,6 +89,7 @@
     private String defaultDataUnit = "365000d";
 
     @JsonIgnore
+    @Column(length = 20480)
     private String config;
 
     @Transient
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/DataSource.java b/service/src/main/java/org/apache/griffin/core/measure/entity/DataSource.java
index f0a0038..970977b 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/DataSource.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/DataSource.java
@@ -52,7 +52,7 @@
     private String name;
 
     @OneToMany(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST,
-            CascadeType.REMOVE, CascadeType.MERGE})
+        CascadeType.REMOVE, CascadeType.MERGE})
     @JoinColumn(name = "data_source_id")
     private List<DataConnector> connectors = new ArrayList<>();
 
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/EvaluateRule.java b/service/src/main/java/org/apache/griffin/core/measure/entity/EvaluateRule.java
index 9584800..1a9c452 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/EvaluateRule.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/EvaluateRule.java
@@ -27,6 +27,7 @@
 import javax.persistence.FetchType;
 import javax.persistence.JoinColumn;
 import javax.persistence.OneToMany;
+import javax.persistence.OrderBy;
 
 
 @Entity
@@ -34,8 +35,9 @@
     private static final long serialVersionUID = 4240072518233967528L;
 
     @OneToMany(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST,
-            CascadeType.REMOVE, CascadeType.MERGE})
+        CascadeType.REMOVE, CascadeType.MERGE})
     @JoinColumn(name = "evaluate_rule_id")
+    @OrderBy("id ASC")
     private List<Rule> rules = new ArrayList<>();
 
     public List<Rule> getRules() {
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/GriffinMeasure.java b/service/src/main/java/org/apache/griffin/core/measure/entity/GriffinMeasure.java
index e2e247e..f09cac3 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/GriffinMeasure.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/GriffinMeasure.java
@@ -182,6 +182,7 @@
     @PrePersist
     @PreUpdate
     public void save() throws JsonProcessingException {
+        super.save();
         if (ruleDescriptionMap != null) {
             this.ruleDescription = JsonUtil.toJson(ruleDescriptionMap);
         }
@@ -189,6 +190,7 @@
 
     @PostLoad
     public void load() throws IOException {
+        super.load();
         if (!StringUtils.isEmpty(ruleDescription)) {
             this.ruleDescriptionMap = JsonUtil.toEntity(ruleDescription,
                 new TypeReference<Map<String, Object>>() {
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/Measure.java b/service/src/main/java/org/apache/griffin/core/measure/entity/Measure.java
index 2032c1d..9949949 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/Measure.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/Measure.java
@@ -19,26 +19,38 @@
 
 package org.apache.griffin.core.measure.entity;
 
-import com.fasterxml.jackson.annotation.*;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.type.TypeReference;
-import org.apache.commons.lang.StringUtils;
-import org.apache.griffin.core.util.JsonUtil;
 
-import javax.persistence.*;
-import javax.validation.constraints.NotNull;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
-import java.util.Map;
+import javax.persistence.Entity;
+import javax.persistence.EnumType;
+import javax.persistence.Enumerated;
+import javax.persistence.Inheritance;
+import javax.persistence.InheritanceType;
+import javax.persistence.PostLoad;
+import javax.persistence.PrePersist;
+import javax.persistence.PreUpdate;
+import javax.persistence.Transient;
+import javax.validation.constraints.NotNull;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.griffin.core.util.JsonUtil;
 
 @Entity
 @Inheritance(strategy = InheritanceType.JOINED)
 @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY,
-        property = "measure.type")
+    property = "measure.type")
 @JsonSubTypes({
-        @JsonSubTypes.Type(value = GriffinMeasure.class, name = "griffin"),
-        @JsonSubTypes.Type(value = ExternalMeasure.class, name = "external")})
+    @JsonSubTypes.Type(value = GriffinMeasure.class, name = "griffin"),
+    @JsonSubTypes.Type(value = ExternalMeasure.class, name = "external")})
 public abstract class Measure extends AbstractAuditableEntity {
     private static final long serialVersionUID = -4748881017029815714L;
 
@@ -137,8 +149,6 @@
     public void save() throws JsonProcessingException {
         if (sinksList != null) {
             this.sinks = JsonUtil.toJson(sinksList);
-        } else {
-            this.sinks = null;
         }
     }
 
@@ -147,8 +157,6 @@
         if (!StringUtils.isEmpty(sinks)) {
             this.sinksList = JsonUtil.toEntity(sinks, new TypeReference<List<String>>() {
             });
-        } else {
-            this.sinksList = null;
         }
     }
 
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/Rule.java b/service/src/main/java/org/apache/griffin/core/measure/entity/Rule.java
index 3792e4d..de3a9b2 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/Rule.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/Rule.java
@@ -186,12 +186,12 @@
         if (!StringUtils.isEmpty(details)) {
             this.detailsMap = JsonUtil.toEntity(
                 details, new TypeReference<Map<String, Object>>() {
-            });
+                });
         }
         if (!StringUtils.isEmpty(out)) {
             this.outList = JsonUtil.toEntity(
                 out, new TypeReference<List<Map<String, Object>>>() {
-            });
+                });
         }
     }
 
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/StreamingPreProcess.java b/service/src/main/java/org/apache/griffin/core/measure/entity/StreamingPreProcess.java
index 963304b..1387c7e 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/StreamingPreProcess.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/StreamingPreProcess.java
@@ -122,8 +122,8 @@
     public void load() throws IOException {
         if (!StringUtils.isEmpty(details)) {
             this.detailsMap = JsonUtil.toEntity(details,
-                    new TypeReference<Map<String, Object>>() {
-                    });
+                new TypeReference<Map<String, Object>>() {
+                });
         }
     }
 
diff --git a/service/src/main/java/org/apache/griffin/core/measure/repo/MeasureRepo.java b/service/src/main/java/org/apache/griffin/core/measure/repo/MeasureRepo.java
index eb24b1d..a96415c 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/repo/MeasureRepo.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/repo/MeasureRepo.java
@@ -32,7 +32,7 @@
  * @param <T> Measure and its subclass
  */
 public interface MeasureRepo<T extends Measure>
-        extends CrudRepository<T, Long> {
+    extends CrudRepository<T, Long> {
 
     /**
      * search repository by name and deletion state
@@ -76,7 +76,7 @@
      * @return organization collection
      */
     @Query("select DISTINCT m.organization from #{#entityName} m "
-            + "where m.deleted = ?1 and m.organization is not null")
+        + "where m.deleted = ?1 and m.organization is not null")
     List<String> findOrganizations(Boolean deleted);
 
     /**
@@ -87,6 +87,6 @@
      * @return organization collection
      */
     @Query("select m.name from #{#entityName} m "
-            + "where m.organization= ?1 and m.deleted= ?2")
+        + "where m.organization= ?1 and m.deleted= ?2")
     List<String> findNameByOrganization(String organization, Boolean deleted);
 }
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java
index ba425ec..175347f 100644
--- a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java
@@ -24,6 +24,7 @@
 
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -34,6 +35,7 @@
 public class HiveMetaStoreController {
 
     @Autowired
+    @Qualifier(value = "metastoreSvc")
     private HiveMetaStoreService hiveMetaStoreService;
 
     @RequestMapping(value = "/dbs", method = RequestMethod.GET)
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreProxy.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreProxy.java
index adb71e9..f9c1236 100644
--- a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreProxy.java
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreProxy.java
@@ -33,7 +33,7 @@
 @Component
 public class HiveMetaStoreProxy {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(HiveMetaStoreProxy.class);
+        .getLogger(HiveMetaStoreProxy.class);
 
     @Value("${hive.metastore.uris}")
     private String uris;
@@ -60,7 +60,7 @@
         HiveConf hiveConf = new HiveConf();
         hiveConf.set("hive.metastore.local", "false");
         hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES,
-                3);
+            3);
         hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, uris);
         hiveConf.setIntVar(HiveConf.ConfVars.HMSHANDLERATTEMPTS, attempts);
         hiveConf.setVar(HiveConf.ConfVars.HMSHANDLERINTERVAL, interval);
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java
index 48a78a4..d855183 100644
--- a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java
@@ -19,17 +19,19 @@
 
 package org.apache.griffin.core.metastore.hive;
 
+import com.google.common.collect.Lists;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import com.google.common.collect.Lists;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.cache.annotation.CacheConfig;
 import org.springframework.cache.annotation.CacheEvict;
@@ -40,11 +42,12 @@
 
 
 @Service
+@Qualifier(value = "metastoreSvc")
 @CacheConfig(cacheNames = "hive", keyGenerator = "cacheKeyGenerator")
 public class HiveMetaStoreServiceImpl implements HiveMetaStoreService {
 
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(HiveMetaStoreService.class);
+        .getLogger(HiveMetaStoreService.class);
 
     @Autowired
     private IMetaStoreClient client = null;
@@ -56,6 +59,10 @@
     public HiveMetaStoreServiceImpl() {
     }
 
+    public void setClient(IMetaStoreClient client) {
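+        // Setter so the metastore client can be injected explicitly (e.g. a mock in tests)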
+        this.client = client;
+    }
+
     @Override
     @Cacheable(unless = "#result==null")
     public Iterable<String> getAllDatabases() {
@@ -63,7 +70,7 @@
         try {
             if (client == null) {
                 LOGGER.warn("Hive client is null. " +
-                        "Please check your hive config.");
+                    "Please check your hive config.");
                 return new ArrayList<>();
             }
             results = client.getAllDatabases();
@@ -82,7 +89,7 @@
         try {
             if (client == null) {
                 LOGGER.warn("Hive client is null. " +
-                        "Please check your hive config.");
+                    "Please check your hive config.");
                 return new ArrayList<>();
             }
             results = client.getAllTables(getUseDbName(dbName));
@@ -100,17 +107,17 @@
     public List<Table> getAllTable(String db) {
         return getTables(db);
     }
-    
+
     @Override
     @Cacheable(unless = "#result==null || #result.isEmpty()")
     public Map<String, List<String>> getAllTableNames() {
         Map<String, List<String>> result = new HashMap<>();
-        for (String dbName: getAllDatabases()) {
+        for (String dbName : getAllDatabases()) {
             result.put(dbName, Lists.newArrayList(getAllTableNames(dbName)));
         }
         return result;
     }
-    
+
     @Override
     @Cacheable(unless = "#result==null")
     public Map<String, List<Table>> getAllTable() {
@@ -136,30 +143,30 @@
 
 
     @Override
-    @Cacheable(unless="#result==null")
+    @Cacheable(unless = "#result==null")
     public Table getTable(String dbName, String tableName) {
         Table result = null;
         try {
             if (client == null) {
                 LOGGER.warn("Hive client is null. " +
-                        "Please check your hive config.");
+                    "Please check your hive config.");
                 return null;
             }
             result = client.getTable(getUseDbName(dbName), tableName);
         } catch (Exception e) {
             reconnect();
             LOGGER.error("Exception fetching table info : {}. {}", tableName,
-                    e);
+                e);
         }
         return result;
     }
 
     @Scheduled(fixedRateString =
-            "${cache.evict.hive.fixedRate.in.milliseconds}")
+        "${cache.evict.hive.fixedRate.in.milliseconds}")
     @CacheEvict(
-            cacheNames = "hive",
-            allEntries = true,
-            beforeInvocation = true)
+        cacheNames = "hive",
+        allEntries = true,
+        beforeInvocation = true)
     public void evictHiveCache() {
         LOGGER.info("Evict hive cache");
         // TODO: calls within same bean are not cached -- this call is not populating anything
@@ -175,7 +182,7 @@
         try {
             if (client == null) {
                 LOGGER.warn("Hive client is null. " +
-                        "Please check your hive config.");
+                    "Please check your hive config.");
                 return allTables;
             }
             Iterable<String> tables = client.getAllTables(useDbName);
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceJdbcImpl.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceJdbcImpl.java
new file mode 100644
index 0000000..0df028f
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceJdbcImpl.java
@@ -0,0 +1,343 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import javax.annotation.PostConstruct;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.cache.annotation.CacheConfig;
+import org.springframework.cache.annotation.CacheEvict;
+import org.springframework.cache.annotation.Cacheable;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Service;
+
+
+@Service
+@Qualifier(value = "jdbcSvc")
+@CacheConfig(cacheNames = "jdbcHive", keyGenerator = "cacheKeyGenerator")
+public class HiveMetaStoreServiceJdbcImpl implements HiveMetaStoreService {
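+    // Alternative HiveMetaStoreService that reads Hive metadata over JDBC ("show databases",
+    // "show tables in", "show create table") instead of the thrift metastore client;
+    // registered under the "jdbcSvc" qualifier so callers can pick between the two beans.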
+
+    private static final Logger LOGGER = LoggerFactory
+        .getLogger(HiveMetaStoreService.class);
+
+    private static final String SHOW_TABLES_IN = "show tables in ";
+
+    private static final String SHOW_DATABASE = "show databases";
+
+    private static final String SHOW_CREATE_TABLE = "show create table ";
+
+    @Value("${hive.jdbc.className}")
+    private String hiveClassName;
+
+    @Value("${hive.jdbc.url}")
+    private String hiveUrl;
+
+    @Value("${hive.need.kerberos}")
+    private String needKerberos;
+
+    @Value("${hive.keytab.user}")
+    private String keytabUser;
+
+    @Value("${hive.keytab.path}")
+    private String keytabPath;
+
+    private Connection conn;
+
+    public void setConn(Connection conn) {
+        this.conn = conn;
+    }
+
+    public void setHiveClassName(String hiveClassName) {
+        this.hiveClassName = hiveClassName;
+    }
+
+    public void setNeedKerberos(String needKerberos) {
+        this.needKerberos = needKerberos;
+    }
+
+    public void setKeytabUser(String keytabUser) {
+        this.keytabUser = keytabUser;
+    }
+
+    public void setKeytabPath(String keytabPath) {
+        this.keytabPath = keytabPath;
+    }
+
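+    // When hive.need.kerberos is "true", log in from the configured keytab once at startup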
+    @PostConstruct
+    public void init() {
+        if (needKerberos != null && needKerberos.equalsIgnoreCase("true")) {
+            LOGGER.info("Hive need Kerberos Auth.");
+
+            Configuration conf = new Configuration();
+            conf.set("hadoop.security.authentication", "Kerberos");
+            UserGroupInformation.setConfiguration(conf);
+            try {
+                UserGroupInformation.loginUserFromKeytab(keytabUser, keytabPath);
+            } catch (IOException e) {
+                LOGGER.error("Register Kerberos has error. {}", e.getMessage());
+            }
+        }
+    }
+
+    @Override
+    @Cacheable(unless = "#result==null")
+    public Iterable<String> getAllDatabases() {
+        return queryHiveString(SHOW_DATABASE);
+    }
+
+    @Override
+    @Cacheable(unless = "#result==null")
+    public Iterable<String> getAllTableNames(String dbName) {
+        return queryHiveString(SHOW_TABLES_IN + dbName);
+    }
+
+    @Override
+    @Cacheable(unless = "#result==null")
+    public Map<String, List<String>> getAllTableNames() {
+        // If there are a lot of databases in Hive, this method can cause Griffin to crash
+        Map<String, List<String>> res = new HashMap<>();
+        for (String dbName : getAllDatabases()) {
+            List<String> list = (List<String>) queryHiveString(SHOW_TABLES_IN + dbName);
+            res.put(dbName, list);
+        }
+        return res;
+    }
+
+    @Override
+    public List<Table> getAllTable(String db) {
+        return null;
+    }
+
+    @Override
+    public Map<String, List<Table>> getAllTable() {
+        return null;
+    }
+
+    @Override
+    @Cacheable(unless = "#result==null")
+    public Table getTable(String dbName, String tableName) {
+        Table result = new Table();
+        result.setDbName(dbName);
+        result.setTableName(tableName);
+
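+        // Rebuild a minimal Table (name, location, columns) by parsing the "show create table" output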
+        String sql = SHOW_CREATE_TABLE + dbName + "." + tableName;
+        Statement stmt = null;
+        ResultSet rs = null;
+        StringBuilder sb = new StringBuilder();
+
+        try {
+            Class.forName(hiveClassName);
+            if (conn == null) {
+                conn = DriverManager.getConnection(hiveUrl);
+            }
+            LOGGER.info("got connection");
+
+            stmt = conn.createStatement();
+            rs = stmt.executeQuery(sql);
+            while (rs.next()) {
+                String s = rs.getString(1);
+                sb.append(s);
+            }
+            String location = getLocation(sb.toString());
+            List<FieldSchema> cols = getColums(sb.toString());
+            StorageDescriptor sd = new StorageDescriptor();
+            sd.setLocation(location);
+            sd.setCols(cols);
+            result.setSd(sd);
+        } catch (Exception e) {
+            LOGGER.error("Query Hive Table metadata has error. {}", e.getMessage());
+        } finally {
+            closeConnection(stmt, rs);
+        }
+        return result;
+    }
+
+    @Scheduled(fixedRateString =
+        "${cache.evict.hive.fixedRate.in.milliseconds}")
+    @CacheEvict(
+        cacheNames = "jdbcHive",
+        allEntries = true,
+        beforeInvocation = true)
+    public void evictHiveCache() {
+        LOGGER.info("Evict hive cache");
+    }
+
+    /**
+     * Run a Hive query such as "show tables in db" or "show databases",
+     * which returns a single column of strings.
+     *
+     * @param sql sql string
+     * @return query results as a list of strings
+     */
+    private Iterable<String> queryHiveString(String sql) {
+        List<String> res = new ArrayList<>();
+        Statement stmt = null;
+        ResultSet rs = null;
+
+        try {
+            Class.forName(hiveClassName);
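+            // Open the JDBC connection lazily; closeConnection() resets it to null after each query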
+            if (conn == null) {
+                conn = DriverManager.getConnection(hiveUrl);
+            }
+            LOGGER.info("got connection");
+            stmt = conn.createStatement();
+            rs = stmt.executeQuery(sql);
+            while (rs.next()) {
+                res.add(rs.getString(1));
+            }
+        } catch (Exception e) {
+            LOGGER.error("Query Hive JDBC has error, {}", e.getMessage());
+        } finally {
+            closeConnection(stmt, rs);
+        }
+        return res;
+    }
+
+
+    private void closeConnection(Statement stmt, ResultSet rs) {
+        try {
+            if (rs != null) {
+                rs.close();
+            }
+            if (stmt != null) {
+                stmt.close();
+            }
+            if (conn != null) {
+                conn.close();
+                conn = null;
+            }
+        } catch (SQLException e) {
+            LOGGER.error("Close JDBC connection has problem. {}", e.getMessage());
+        }
+    }
+
+    /**
+     * Get the Hive table location from hive table metadata string
+     *
+     * @param tableMetadata hive table metadata string
+     * @return Hive table location
+     */
+    public String getLocation(String tableMetadata) {
+        tableMetadata = tableMetadata.toLowerCase();
+        int index = tableMetadata.indexOf("location");
+        if (index == -1) {
+            return "";
+        }
+
+        int start = tableMetadata.indexOf("\'", index);
+        int end = tableMetadata.indexOf("\'", start + 1);
+
+        if (start == -1 || end == -1) {
+            return "";
+        }
+
+        return tableMetadata.substring(start + 1, end);
+    }
+
+    /**
+     * Get the Hive table schema: column name, column type, column comment
+     * The input string looks like the following:
+     * <p>
+     * CREATE TABLE `employee`(
+     * `eid` int,
+     * `name` string,
+     * `salary` string,
+     * `destination` string)
+     * COMMENT 'Employee details'
+     * ROW FORMAT SERDE
+     * 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+     * WITH SERDEPROPERTIES (
+     * 'field.delim'='\t',
+     * 'line.delim'='\n',
+     * 'serialization.format'='\t')
+     * STORED AS INPUTFORMAT
+     * 'org.apache.hadoop.mapred.TextInputFormat'
+     * OUTPUTFORMAT
+     * 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+     * LOCATION
+     * 'file:/user/hive/warehouse/employee'
+     * TBLPROPERTIES (
+     * 'bucketing_version'='2',
+     * 'transient_lastDdlTime'='1562086077')
+     *
+     * @param tableMetadata hive table metadata string
+     * @return List of FieldSchema
+     */
+    public List<FieldSchema> getColums(String tableMetadata) {
+        List<FieldSchema> res = new ArrayList<>();
+        int start = tableMetadata.indexOf("(") + 1; // index of the first '('
+        int end = tableMetadata.indexOf(")", start); // index of the first ')'
+        String[] colsArr = tableMetadata.substring(start, end).split(",");
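+        // Each entry looks like `eid` int or `name` string COMMENT '...', as in the example above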
+        for (String colStr : colsArr) {
+            colStr = colStr.trim();
+            String[] parts = colStr.split(" ");
+            String colName = parts[0].trim().substring(1, parts[0].trim().length() - 1);
+            String colType = parts[1].trim();
+            String comment = getComment(colStr);
+            FieldSchema schema = new FieldSchema(colName, colType, comment);
+            res.add(schema);
+        }
+        return res;
+    }
+
+    /**
+     * Parse one column definition string and extract its comment
+     * <p>
+     * Input example:
+     * `merch_date` string COMMENT 'this is merch process date'
+     *
+     * @param colStr column string
+     * @return the column comment, or an empty string if there is none
+     */
+    public String getComment(String colStr) {
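+        // Match a single-quoted literal such as 'this is merch process date'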
+        String pattern = "'([^\"|^\']|\"|\')*'";
+        Matcher m = Pattern.compile(pattern).matcher(colStr.toLowerCase());
+        if (m.find()) {
+            String text = m.group();
+            String result = text.substring(1, text.length() - 1);
+            if (!result.isEmpty()) {
+                LOGGER.info("Found value: " + result);
+            }
+            return result;
+        } else {
+            LOGGER.info("NO MATCH");
+            return "";
+        }
+    }
+}
\ No newline at end of file
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaController.java b/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaController.java
index 732a59e..63df077 100644
--- a/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaController.java
+++ b/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaController.java
@@ -48,7 +48,7 @@
 
     @RequestMapping(value = "/versions", method = RequestMethod.GET)
     public Iterable<Integer> getSubjectVersions(
-            @RequestParam("subject") String subject) {
+        @RequestParam("subject") String subject) {
         return kafkaSchemaService.getSubjectVersions(subject);
     }
 
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImpl.java b/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImpl.java
index ba6bba2..89b8985 100644
--- a/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImpl.java
@@ -36,7 +36,7 @@
 public class KafkaSchemaServiceImpl implements KafkaSchemaService {
 
     private static final Logger log = LoggerFactory
-            .getLogger(KafkaSchemaServiceImpl.class);
+        .getLogger(KafkaSchemaServiceImpl.class);
 
     @Value("${kafka.schema.registry.url}")
     private String url;
@@ -59,7 +59,7 @@
         String path = "/schemas/ids/" + id;
         String regUrl = registryUrl(path);
         ResponseEntity<SchemaString> res = restTemplate.getForEntity(regUrl,
-                SchemaString.class);
+            SchemaString.class);
         SchemaString result = res.getBody();
         return result;
     }
@@ -69,7 +69,7 @@
         String path = "/subjects";
         String regUrl = registryUrl(path);
         ResponseEntity<String[]> res = restTemplate.getForEntity(regUrl,
-                String[].class);
+            String[].class);
         Iterable<String> result = Arrays.asList(res.getBody());
         return result;
     }
@@ -79,7 +79,7 @@
         String path = "/subjects/" + subject + "/versions";
         String regUrl = registryUrl(path);
         ResponseEntity<Integer[]> res = restTemplate.getForEntity(regUrl,
-                Integer[].class);
+            Integer[].class);
         Iterable<Integer> result = Arrays.asList(res.getBody());
         return result;
     }
@@ -89,7 +89,7 @@
         String path = "/subjects/" + subject + "/versions/" + version;
         String regUrl = registryUrl(path);
         ResponseEntity<Schema> res = restTemplate.getForEntity(regUrl,
-                Schema.class);
+            Schema.class);
         Schema result = res.getBody();
         return result;
     }
@@ -99,7 +99,7 @@
         String path = "/config";
         String regUrl = registryUrl(path);
         ResponseEntity<Config> res = restTemplate.getForEntity(regUrl,
-                Config.class);
+            Config.class);
         Config result = res.getBody();
         return result;
     }
@@ -109,7 +109,7 @@
         String path = "/config/" + subject;
         String regUrl = registryUrl(path);
         ResponseEntity<Config> res = restTemplate.getForEntity(regUrl,
-                Config.class);
+            Config.class);
         Config result = res.getBody();
         return result;
     }
diff --git a/service/src/main/java/org/apache/griffin/core/metric/MetricController.java b/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
index 4d39ba3..23b1238 100644
--- a/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
+++ b/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
@@ -27,6 +27,7 @@
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
@@ -48,27 +49,32 @@
 
     @RequestMapping(value = "/metrics/values", method = RequestMethod.GET)
     public List<MetricValue> getMetricValues(@RequestParam("metricName")
-                                                     String metricName,
+                                                 String metricName,
                                              @RequestParam("size") int size,
                                              @RequestParam(value = "offset",
-                                                     defaultValue = "0")
-                                                     int offset,
+                                                 defaultValue = "0")
+                                                 int offset,
                                              @RequestParam(value = "tmst",
-                                                     defaultValue = "0")
-                                                     long tmst) {
+                                                 defaultValue = "0")
+                                                 long tmst) {
         return metricService.getMetricValues(metricName, offset, size, tmst);
     }
 
     @RequestMapping(value = "/metrics/values", method = RequestMethod.POST)
     public ResponseEntity<?> addMetricValues(@RequestBody List<MetricValue>
-                                                     values) {
+                                                 values) {
         return metricService.addMetricValues(values);
     }
 
     @RequestMapping(value = "/metrics/values", method = RequestMethod.DELETE)
     @ResponseStatus(HttpStatus.NO_CONTENT)
     public ResponseEntity<?> deleteMetricValues(@RequestParam("metricName")
-                                                        String metricName) {
+                                                    String metricName) {
         return metricService.deleteMetricValues(metricName);
     }
+
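+    // Return the metric for a single job instance, resolved through its Yarn application id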
+    @RequestMapping(value = "/metrics/values/{instanceId}", method = RequestMethod.GET)
+    public MetricValue getMetric(@PathVariable("instanceId") Long id) {
+        return metricService.findMetric(id);
+    }
 }
diff --git a/service/src/main/java/org/apache/griffin/core/metric/MetricService.java b/service/src/main/java/org/apache/griffin/core/metric/MetricService.java
index 039b0cb..75402ff 100644
--- a/service/src/main/java/org/apache/griffin/core/metric/MetricService.java
+++ b/service/src/main/java/org/apache/griffin/core/metric/MetricService.java
@@ -37,4 +37,6 @@
     ResponseEntity addMetricValues(List<MetricValue> values);
 
     ResponseEntity<?> deleteMetricValues(String metricName);
+
+    MetricValue findMetric(Long id);
 }
diff --git a/service/src/main/java/org/apache/griffin/core/metric/MetricServiceImpl.java b/service/src/main/java/org/apache/griffin/core/metric/MetricServiceImpl.java
index f817f65..886e6b9 100644
--- a/service/src/main/java/org/apache/griffin/core/metric/MetricServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/metric/MetricServiceImpl.java
@@ -23,6 +23,7 @@
 import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_METRIC_RECORDS_OFFSET;
 import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_METRIC_RECORDS_SIZE;
 import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_METRIC_VALUE_FORMAT;
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_INSTANCE_NOT_FOUND;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -36,6 +37,8 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.griffin.core.exception.GriffinException;
 import org.apache.griffin.core.job.entity.AbstractJob;
+import org.apache.griffin.core.job.entity.JobInstanceBean;
+import org.apache.griffin.core.job.repo.JobInstanceRepo;
 import org.apache.griffin.core.job.repo.JobRepo;
 import org.apache.griffin.core.measure.entity.Measure;
 import org.apache.griffin.core.measure.repo.MeasureRepo;
@@ -51,7 +54,7 @@
 @Service
 public class MetricServiceImpl implements MetricService {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(MetricServiceImpl.class);
+        .getLogger(MetricServiceImpl.class);
 
     @Autowired
     private MeasureRepo<Measure> measureRepo;
@@ -59,6 +62,8 @@
     private JobRepo<AbstractJob> jobRepo;
     @Autowired
     private MetricStore metricStore;
+    @Autowired
+    private JobInstanceRepo jobInstanceRepo;
 
     @Override
     public Map<String, List<Metric>> getAllMetrics() {
@@ -66,9 +71,9 @@
         List<AbstractJob> jobs = jobRepo.findByDeleted(false);
         List<Measure> measures = measureRepo.findByDeleted(false);
         Map<Long, Measure> measureMap = measures.stream().collect(Collectors
-                .toMap(Measure::getId, Function.identity()));
+            .toMap(Measure::getId, Function.identity()));
         Map<Long, List<AbstractJob>> jobMap = jobs.stream().collect(Collectors
-                .groupingBy(AbstractJob::getMeasureId, Collectors.toList()));
+            .groupingBy(AbstractJob::getMeasureId, Collectors.toList()));
         for (Map.Entry<Long, List<AbstractJob>> entry : jobMap.entrySet()) {
             Long measureId = entry.getKey();
             Measure measure = measureMap.get(measureId);
@@ -76,9 +81,9 @@
             List<Metric> metrics = new ArrayList<>();
             for (AbstractJob job : jobList) {
                 List<MetricValue> metricValues = getMetricValues(job
-                        .getMetricName(), 0, 300, job.getCreatedDate());
+                    .getMetricName(), 0, 300, job.getCreatedDate());
                 metrics.add(new Metric(job.getMetricName(), measure.getDqType(),
-                        measure.getOwner(), metricValues));
+                    measure.getOwner(), metricValues));
             }
             metricMap.put(measure.getName(), metrics);
 
@@ -91,19 +96,19 @@
                                              int size, long tmst) {
         if (offset < 0) {
             throw new GriffinException.BadRequestException
-                    (INVALID_METRIC_RECORDS_OFFSET);
+                (INVALID_METRIC_RECORDS_OFFSET);
         }
         if (size < 0) {
             throw new GriffinException.BadRequestException
-                    (INVALID_METRIC_RECORDS_SIZE);
+                (INVALID_METRIC_RECORDS_SIZE);
         }
         try {
             return metricStore.getMetricValues(metricName, offset, size, tmst);
         } catch (IOException e) {
             LOGGER.error("Failed to get metric values named {}. {}",
-                    metricName, e.getMessage());
+                metricName, e.getMessage());
             throw new GriffinException.ServiceException(
-                    "Failed to get metric values", e);
+                "Failed to get metric values", e);
         }
     }
 
@@ -118,11 +123,11 @@
         } catch (JsonProcessingException e) {
             LOGGER.warn("Failed to parse metric value.", e.getMessage());
             throw new GriffinException.BadRequestException
-                    (INVALID_METRIC_VALUE_FORMAT);
+                (INVALID_METRIC_VALUE_FORMAT);
         } catch (IOException e) {
             LOGGER.error("Failed to add metric values", e);
             throw new GriffinException.ServiceException(
-                    "Failed to add metric values", e);
+                "Failed to add metric values", e);
         }
     }
 
@@ -133,17 +138,34 @@
             return metricStore.deleteMetricValues(metricName);
         } catch (IOException e) {
             LOGGER.error("Failed to delete metric values named {}. {}",
-                    metricName, e.getMessage());
+                metricName, e.getMessage());
             throw new GriffinException.ServiceException(
-                    "Failed to delete metric values.", e);
+                "Failed to delete metric values.", e);
+        }
+    }
+
+    @Override
+    public MetricValue findMetric(Long id) {
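+        // Map the instance id to its Yarn appId, then look the metric up in the metric store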
+        JobInstanceBean jobInstanceBean = jobInstanceRepo.findByInstanceId(id);
+        if (jobInstanceBean == null) {
+            LOGGER.warn("There are no job instances with id {} ", id);
+            throw new GriffinException
+                .NotFoundException(JOB_INSTANCE_NOT_FOUND);
+        }
+        String appId = jobInstanceBean.getAppId();
+        try {
+            return metricStore.getMetric(appId);
+        } catch (IOException e) {
+            LOGGER.warn("Failed to get metric for applicationId {} ", appId);
+            throw new GriffinException.ServiceException("Failed to find metric", e);
         }
     }
 
     private void checkFormat(MetricValue value) {
         if (StringUtils.isBlank(value.getName()) || value.getTmst() == null
-                || MapUtils.isEmpty(value.getValue())) {
+            || MapUtils.isEmpty(value.getValue())) {
             throw new GriffinException.BadRequestException
-                    (INVALID_METRIC_VALUE_FORMAT);
+                (INVALID_METRIC_VALUE_FORMAT);
         }
     }
 }
diff --git a/service/src/main/java/org/apache/griffin/core/metric/MetricStore.java b/service/src/main/java/org/apache/griffin/core/metric/MetricStore.java
index a452648..9510cce 100644
--- a/service/src/main/java/org/apache/griffin/core/metric/MetricStore.java
+++ b/service/src/main/java/org/apache/griffin/core/metric/MetricStore.java
@@ -31,7 +31,9 @@
                                       long tmst) throws IOException;
 
     ResponseEntity<?> addMetricValues(List<MetricValue> metricValues)
-            throws IOException;
+        throws IOException;
 
     ResponseEntity<?> deleteMetricValues(String metricName) throws IOException;
+
+    MetricValue getMetric(String applicationId) throws IOException;
 }
diff --git a/service/src/main/java/org/apache/griffin/core/metric/MetricStoreImpl.java b/service/src/main/java/org/apache/griffin/core/metric/MetricStoreImpl.java
index 3bff8f5..cc4ade8 100644
--- a/service/src/main/java/org/apache/griffin/core/metric/MetricStoreImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/metric/MetricStoreImpl.java
@@ -31,6 +31,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 import org.apache.griffin.core.metric.model.MetricValue;
 import org.apache.griffin.core.util.JsonUtil;
@@ -76,8 +77,8 @@
         if (!user.isEmpty() && !password.isEmpty()) {
             String encodedAuth = buildBasicAuthString(user, password);
             Header[] requestHeaders = new Header[]{
-                    new BasicHeader(org.apache.http.HttpHeaders.AUTHORIZATION,
-                            encodedAuth)};
+                new BasicHeader(org.apache.http.HttpHeaders.AUTHORIZATION,
+                    encodedAuth)};
             builder.setDefaultHeaders(requestHeaders);
         }
         this.client = builder.build();
@@ -89,22 +90,22 @@
         this.urlPost = urlBase.concat("/_bulk");
         this.urlDelete = urlBase.concat("/_delete_by_query");
         this.indexMetaData = String.format(
-                "{ \"index\" : { \"_index\" : " +
-                        "\"%s\",\"_type\" : \"%s\" } }%n",
-                INDEX,
-                TYPE);
+            "{ \"index\" : { \"_index\" : " +
+                "\"%s\",\"_type\" : \"%s\" } }%n",
+            INDEX,
+            TYPE);
         this.mapper = new ObjectMapper();
     }
 
     @Override
     public List<MetricValue> getMetricValues(String metricName, int from,
                                              int size, long tmst)
-            throws IOException {
+        throws IOException {
         HttpEntity entity = getHttpEntityForSearch(metricName, from, size,
-                tmst);
+            tmst);
         try {
             Response response = client.performRequest("GET", urlGet,
-                    Collections.emptyMap(), entity);
+                Collections.emptyMap(), entity);
             return getMetricValuesFromResponse(response);
         } catch (ResponseException e) {
             if (e.getResponse().getStatusLine().getStatusCode() == 404) {
@@ -115,42 +116,46 @@
     }
 
     private HttpEntity getHttpEntityForSearch(String metricName, int from, int
-            size, long tmst)
-            throws JsonProcessingException {
+        size, long tmst)
+        throws JsonProcessingException {
         Map<String, Object> map = new HashMap<>();
         Map<String, Object> queryParam = new HashMap<>();
         Map<String, Object> termQuery = Collections.singletonMap("name.keyword",
-                metricName);
+            metricName);
         queryParam.put("filter", Collections.singletonMap("term", termQuery));
         Map<String, Object> sortParam = Collections
-                .singletonMap("tmst", Collections.singletonMap("order",
-                        "desc"));
+            .singletonMap("tmst", Collections.singletonMap("order",
+                "desc"));
         map.put("query", Collections.singletonMap("bool", queryParam));
         map.put("sort", sortParam);
         map.put("from", from);
         map.put("size", size);
         return new NStringEntity(JsonUtil.toJson(map),
-                ContentType.APPLICATION_JSON);
+            ContentType.APPLICATION_JSON);
     }
 
     private List<MetricValue> getMetricValuesFromResponse(Response response)
-            throws IOException {
+        throws IOException {
         List<MetricValue> metricValues = new ArrayList<>();
         JsonNode jsonNode = mapper.readTree(EntityUtils.toString(response
-                .getEntity()));
+            .getEntity()));
         if (jsonNode.hasNonNull("hits") && jsonNode.get("hits")
-                .hasNonNull("hits")) {
+            .hasNonNull("hits")) {
             for (JsonNode node : jsonNode.get("hits").get("hits")) {
                 JsonNode sourceNode = node.get("_source");
-                Map<String, Object> value = JsonUtil.toEntity(sourceNode
-                                .get("value").toString(),
-                        new TypeReference<Map<String, Object>>() {
-                        });
-                MetricValue metricValue = new MetricValue(sourceNode
-                        .get("name")
-                        .asText(),
-                        Long.parseLong(sourceNode.get("tmst").asText()),
-                        value);
+                Map<String, Object> value = JsonUtil.toEntity(
+                    sourceNode.get("value").toString(),
+                    new TypeReference<Map<String, Object>>() {
+                    });
+                Map<String, Object> meta = JsonUtil.toEntity(
+                    Objects.toString(sourceNode.get("metadata"), null),
+                    new TypeReference<Map<String, Object>>() {
+                    });
+                MetricValue metricValue = new MetricValue(
+                    sourceNode.get("name").asText(),
+                    Long.parseLong(sourceNode.get("tmst").asText()),
+                    meta,
+                    value);
                 metricValues.add(metricValue);
             }
         }
@@ -159,17 +164,17 @@
 
     @Override
     public ResponseEntity<?> addMetricValues(List<MetricValue> metricValues)
-            throws IOException {
+        throws IOException {
         String bulkRequestBody = getBulkRequestBody(metricValues);
         HttpEntity entity = new NStringEntity(bulkRequestBody,
-                ContentType.APPLICATION_JSON);
+            ContentType.APPLICATION_JSON);
         Response response = client.performRequest("POST", urlPost,
-                Collections.emptyMap(), entity);
+            Collections.emptyMap(), entity);
         return getResponseEntityFromResponse(response);
     }
 
     private String getBulkRequestBody(List<MetricValue> metricValues) throws
-            JsonProcessingException {
+        JsonProcessingException {
         StringBuilder bulkRequestBody = new StringBuilder();
         for (MetricValue metricValue : metricValues) {
             bulkRequestBody.append(indexMetaData);
@@ -181,29 +186,39 @@
 
     @Override
     public ResponseEntity<?> deleteMetricValues(String metricName) throws
-            IOException {
+        IOException {
         Map<String, Object> param = Collections.singletonMap("query",
-                Collections.singletonMap("term",
-                        Collections.singletonMap("name.keyword", metricName)));
+            Collections.singletonMap("term",
+                Collections.singletonMap("name.keyword", metricName)));
         HttpEntity entity = new NStringEntity(
-                JsonUtil.toJson(param),
-                ContentType.APPLICATION_JSON);
+            JsonUtil.toJson(param),
+            ContentType.APPLICATION_JSON);
         Response response = client.performRequest("POST", urlDelete,
-                Collections.emptyMap(), entity);
+            Collections.emptyMap(), entity);
         return getResponseEntityFromResponse(response);
     }
 
     private ResponseEntity<?> getResponseEntityFromResponse(Response response)
-            throws IOException {
+        throws IOException {
         String body = EntityUtils.toString(response.getEntity());
         HttpStatus status = HttpStatus.valueOf(response.getStatusLine()
-                .getStatusCode());
+            .getStatusCode());
         return new ResponseEntity<>(body, responseHeaders, status);
     }
 
     private static String buildBasicAuthString(String user, String password) {
         String auth = user + ":" + password;
         return String.format("Basic %s", Base64.getEncoder().encodeToString(
-                auth.getBytes()));
+            auth.getBytes()));
+    }
+
+    @Override
+    public MetricValue getMetric(String applicationId) throws IOException {
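+        // Search for metric values whose metadata.applicationId matches the given id;
+        // assumes at least one document matches, otherwise get(0) below throws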
+        Response response = client.performRequest(
+            "GET", urlGet,
+            Collections.singletonMap(
+                "q", "metadata.applicationId:" + applicationId));
+        List<MetricValue> metricValues = getMetricValuesFromResponse(response);
+        return metricValues.get(0);
     }
 }
diff --git a/service/src/main/java/org/apache/griffin/core/metric/model/MetricValue.java b/service/src/main/java/org/apache/griffin/core/metric/model/MetricValue.java
index 4839f9f..a540c9b 100644
--- a/service/src/main/java/org/apache/griffin/core/metric/model/MetricValue.java
+++ b/service/src/main/java/org/apache/griffin/core/metric/model/MetricValue.java
@@ -19,7 +19,9 @@
 
 package org.apache.griffin.core.metric.model;
 
+import java.util.Collections;
 import java.util.Map;
+import java.util.Objects;
 
 public class MetricValue {
 
@@ -27,6 +29,8 @@
 
     private Long tmst;
 
+    private Map<String, Object> metadata;
+
     private Map<String, Object> value;
 
     public MetricValue() {
@@ -36,6 +40,15 @@
         this.name = name;
         this.tmst = tmst;
         this.value = value;
+        this.metadata = Collections.emptyMap();
+    }
+
+    public MetricValue(String name, Long tmst,
+        Map<String, Object> metadata, Map<String, Object> value) {
+        this.name = name;
+        this.tmst = tmst;
+        this.metadata = metadata;
+        this.value = value;
     }
 
     public String getName() {
@@ -61,4 +74,35 @@
     public void setValue(Map<String, Object> value) {
         this.value = value;
     }
+
+    public Map<String, Object> getMetadata() {
+        return metadata;
+    }
+
+    public void setMetadata(Map<String, Object> metadata) {
+        this.metadata = metadata;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        MetricValue that = (MetricValue) o;
+        return Objects.equals(name, that.name) &&
+            Objects.equals(tmst, that.tmst) &&
+            Objects.equals(metadata, that.metadata) &&
+            Objects.equals(value, that.value);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(name, tmst, metadata, value);
+    }
+
+    @Override
+    public String toString() {
+        return String.format(
+            "MetricValue{name=%s, ts=%s, meta=%s, value=%s}",
+            name, tmst, metadata, value);
+    }
 }
diff --git a/service/src/main/java/org/apache/griffin/core/util/FSUtil.java b/service/src/main/java/org/apache/griffin/core/util/FSUtil.java
index b537ac6..b6985a4 100644
--- a/service/src/main/java/org/apache/griffin/core/util/FSUtil.java
+++ b/service/src/main/java/org/apache/griffin/core/util/FSUtil.java
@@ -54,6 +54,7 @@
     private static FileSystem fileSystem;
 
     private static FileSystem defaultFS = getDefaultFileSystem();
+
     private static FileSystem getDefaultFileSystem() {
         FileSystem fs = null;
         Configuration conf = new Configuration();
@@ -173,12 +174,12 @@
     }
 
     public static InputStream getSampleInputStream(String path)
-            throws IOException {
+        throws IOException {
         checkHDFSConf();
         if (isFileExist(path)) {
             FSDataInputStream missingData = fileSystem.open(new Path(path));
             BufferedReader bufReader = new BufferedReader(
-                    new InputStreamReader(missingData, Charsets.UTF_8));
+                new InputStreamReader(missingData, Charsets.UTF_8));
             try {
                 String line = null;
                 int rowCnt = 0;
@@ -205,16 +206,16 @@
     private static void checkHDFSConf() {
         if (getFileSystem() == null) {
             throw new NullPointerException("FileSystem is null. " +
-                    "Please check your hdfs config default name.");
+                "Please check your hdfs config default name.");
         }
     }
 
     public static String getFirstMissRecordPath(String hdfsDir)
-            throws Exception {
+        throws Exception {
         List<FileStatus> fileList = listFileStatus(hdfsDir);
         for (int i = 0; i < fileList.size(); i++) {
             if (fileList.get(i).getPath().toUri().toString().toLowerCase()
-                    .contains("missrecord")) {
+                .contains("missrecord")) {
                 return fileList.get(i).getPath().toUri().toString();
             }
         }
@@ -222,12 +223,12 @@
     }
 
     public static InputStream getMissSampleInputStream(String path)
-            throws Exception {
+        throws Exception {
         List<String> subDirList = listSubDir(path);
         //FIXME: only handle 1-sub dir here now
         for (int i = 0; i < subDirList.size(); i++) {
             return getSampleInputStream(getFirstMissRecordPath(
-                    subDirList.get(i)));
+                subDirList.get(i)));
         }
         return getSampleInputStream(getFirstMissRecordPath(path));
     }
diff --git a/service/src/main/java/org/apache/griffin/core/util/FileUtil.java b/service/src/main/java/org/apache/griffin/core/util/FileUtil.java
index d2209f9..03efaf2 100644
--- a/service/src/main/java/org/apache/griffin/core/util/FileUtil.java
+++ b/service/src/main/java/org/apache/griffin/core/util/FileUtil.java
@@ -27,7 +27,7 @@
 
 public class FileUtil {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(FileUtil.class);
+        .getLogger(FileUtil.class);
 
     public static String getFilePath(String name, String location) {
         if (StringUtils.isEmpty(location)) {
@@ -39,7 +39,7 @@
         File[] files = file.listFiles();
         if (files == null) {
             LOGGER.warn("The external location '{}' does not exist.Read from"
-                    + "default path.", location);
+                + "default path.", location);
             return null;
         }
         return getFilePath(name, files, location);
diff --git a/service/src/main/java/org/apache/griffin/core/util/JsonUtil.java b/service/src/main/java/org/apache/griffin/core/util/JsonUtil.java
index 0fe6449..0c1ef98 100644
--- a/service/src/main/java/org/apache/griffin/core/util/JsonUtil.java
+++ b/service/src/main/java/org/apache/griffin/core/util/JsonUtil.java
@@ -35,7 +35,7 @@
 
 public class JsonUtil {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(JsonUtil.class);
+        .getLogger(JsonUtil.class);
 
     public static String toJson(Object obj) throws JsonProcessingException {
         if (obj == null) {
@@ -47,30 +47,30 @@
     }
 
     public static String toJsonWithFormat(Object obj)
-            throws JsonProcessingException {
+        throws JsonProcessingException {
         if (obj == null) {
             LOGGER.warn("Object to be formatted cannot be empty!");
             return null;
         }
         ObjectWriter mapper = new ObjectMapper().writer()
-                .withDefaultPrettyPrinter();
+            .withDefaultPrettyPrinter();
         return mapper.writeValueAsString(obj);
     }
 
     public static <T> T toEntity(String jsonStr, Class<T> type)
-            throws IOException {
+        throws IOException {
         if (StringUtils.isEmpty(jsonStr)) {
             LOGGER.warn("Json string {} is empty!", type);
             return null;
         }
         ObjectMapper mapper = new ObjectMapper();
         mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
-                false);
+            false);
         return mapper.readValue(jsonStr, type);
     }
 
     public static <T> T toEntity(File file, TypeReference type)
-            throws IOException {
+        throws IOException {
         if (file == null) {
             LOGGER.warn("File cannot be empty!");
             return null;
@@ -80,7 +80,7 @@
     }
 
     public static <T> T toEntity(InputStream in, TypeReference type)
-            throws IOException {
+        throws IOException {
         if (in == null) {
             throw new NullPointerException("Input stream cannot be null.");
         }
@@ -89,7 +89,7 @@
     }
 
     public static <T> T toEntity(String jsonStr, TypeReference type)
-            throws IOException {
+        throws IOException {
         if (StringUtils.isEmpty(jsonStr)) {
             LOGGER.warn("Json string {} is empty!", type);
             return null;
diff --git a/service/src/main/java/org/apache/griffin/core/util/MeasureUtil.java b/service/src/main/java/org/apache/griffin/core/util/MeasureUtil.java
index 56ef204..7e57d87 100644
--- a/service/src/main/java/org/apache/griffin/core/util/MeasureUtil.java
+++ b/service/src/main/java/org/apache/griffin/core/util/MeasureUtil.java
@@ -20,6 +20,7 @@
 package org.apache.griffin.core.util;
 
 import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_CONNECTOR_NAME;
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_MEASURE_PREDICATE;
 import static org.apache.griffin.core.exception.GriffinExceptionMessage.MISSING_METRIC_NAME;
 
 import java.util.ArrayList;
@@ -29,6 +30,9 @@
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.griffin.core.exception.GriffinException;
+import org.apache.griffin.core.job.entity.SegmentPredicate;
+import org.apache.griffin.core.job.factory.PredicatorFactory;
+import org.apache.griffin.core.measure.entity.DataConnector;
 import org.apache.griffin.core.measure.entity.DataSource;
 import org.apache.griffin.core.measure.entity.ExternalMeasure;
 import org.apache.griffin.core.measure.entity.GriffinMeasure;
@@ -38,7 +42,7 @@
 
 public class MeasureUtil {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(MeasureUtil.class);
+        .getLogger(MeasureUtil.class);
 
     public static void validateMeasure(Measure measure) {
         if (measure instanceof GriffinMeasure) {
@@ -52,14 +56,32 @@
     private static void validateGriffinMeasure(GriffinMeasure measure) {
         if (getConnectorNamesIfValid(measure) == null) {
             throw new GriffinException.BadRequestException
-                    (INVALID_CONNECTOR_NAME);
+                (INVALID_CONNECTOR_NAME);
         }
+        if (!validatePredicates(measure)) {
+            throw new GriffinException.BadRequestException(INVALID_MEASURE_PREDICATE);
+        }
+    }
+
+    private static boolean validatePredicates(GriffinMeasure measure) {
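+        // Predicates are valid only if PredicatorFactory can instantiate each of them without throwing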
+        for (DataSource dataSource : measure.getDataSources()) {
+            for (DataConnector dataConnector : dataSource.getConnectors()) {
+                for (SegmentPredicate segmentPredicate : dataConnector.getPredicates()) {
+                    try {
+                        PredicatorFactory.newPredicateInstance(segmentPredicate);
+                    } catch (Exception e) {
+                        return false;
+                    }
+                }
+            }
+        }
+        return true;
     }
 
     private static void validateExternalMeasure(ExternalMeasure measure) {
         if (StringUtils.isBlank(measure.getMetricName())) {
             LOGGER.warn("Failed to create external measure {}. " +
-                    "Its metric name is blank.", measure.getName());
+                "Its metric name is blank.", measure.getName());
             throw new GriffinException.BadRequestException(MISSING_METRIC_NAME);
         }
     }
@@ -69,7 +91,7 @@
         List<DataSource> sources = measure.getDataSources();
         for (DataSource source : sources) {
             source.getConnectors().stream().filter(dc -> dc.getName() != null)
-                    .forEach(dc -> sets.add(dc.getName()));
+                .forEach(dc -> sets.add(dc.getName()));
         }
         if (sets.size() == 0 || sets.size() < sources.size()) {
             LOGGER.warn("Connector names cannot be repeated or empty.");
diff --git a/service/src/main/java/org/apache/griffin/core/util/PropertiesUtil.java b/service/src/main/java/org/apache/griffin/core/util/PropertiesUtil.java
index 65f1707..6e5a8f7 100644
--- a/service/src/main/java/org/apache/griffin/core/util/PropertiesUtil.java
+++ b/service/src/main/java/org/apache/griffin/core/util/PropertiesUtil.java
@@ -35,7 +35,7 @@
 
 public class PropertiesUtil {
     private static final Logger LOGGER = LoggerFactory.getLogger(
-            PropertiesUtil.class);
+        PropertiesUtil.class);
 
     public static Properties getProperties(String path, Resource resource) {
         PropertiesFactoryBean propFactoryBean = new PropertiesFactoryBean();
@@ -61,7 +61,7 @@
      */
     public static Properties getConf(String name, String defaultPath,
                                      String location)
-            throws FileNotFoundException {
+        throws FileNotFoundException {
         String path = getConfPath(name, location);
         Resource resource;
         if (path == null) {
diff --git a/service/src/main/java/org/apache/griffin/core/util/TimeUtil.java b/service/src/main/java/org/apache/griffin/core/util/TimeUtil.java
index 17e08d6..077b658 100644
--- a/service/src/main/java/org/apache/griffin/core/util/TimeUtil.java
+++ b/service/src/main/java/org/apache/griffin/core/util/TimeUtil.java
@@ -34,17 +34,17 @@
 
 public class TimeUtil {
     private static final Logger LOGGER = LoggerFactory.getLogger(TimeUtil
-            .class);
+        .class);
     private static final String MILLISECONDS_PATTERN =
-            "(?i)m(illi)?s(ec(ond)?)?";
+        "(?i)m(illi)?s(ec(ond)?)?";
     private static final String SECONDS_PATTERN =
-            "(?i)s(ec(ond)?)?";
+        "(?i)s(ec(ond)?)?";
     private static final String MINUTES_PATTERN =
-            "(?i)m(in(ute)?)?";
+        "(?i)m(in(ute)?)?";
     private static final String HOURS_PATTERN =
-            "(?i)h((ou)?r)?";
+        "(?i)h((ou)?r)?";
     private static final String DAYS_PATTERN =
-            "(?i)d(ay)?";
+        "(?i)d(ay)?";
 
     private static class TimeUnitPair {
         private long t;
@@ -114,9 +114,9 @@
             return milliseconds(t, TimeUnit.DAYS);
         } else {
             LOGGER.warn("Time string format ERROR. " +
-                    "It only supports d(day),h(hour), m(minute), " +
-                    "s(second), ms(millsecond). " +
-                    "Please check your time format.");
+                "It only supports d(day),h(hour), m(minute), " +
+                "s(second), ms(millsecond). " +
+                "Please check your time format.");
             return 0L;
         }
     }
diff --git a/service/src/main/java/org/apache/griffin/core/util/YarnNetUtil.java b/service/src/main/java/org/apache/griffin/core/util/YarnNetUtil.java
index ba0a2e0..10b41ae 100644
--- a/service/src/main/java/org/apache/griffin/core/util/YarnNetUtil.java
+++ b/service/src/main/java/org/apache/griffin/core/util/YarnNetUtil.java
@@ -23,6 +23,7 @@
 
 import com.google.gson.JsonObject;
 import com.google.gson.JsonParser;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.griffin.core.job.entity.JobInstanceBean;
 import org.apache.griffin.core.job.entity.LivySessionStates;
@@ -34,13 +35,13 @@
 
 public class YarnNetUtil {
     private static final Logger LOGGER = LoggerFactory
-            .getLogger(YarnNetUtil.class);
+        .getLogger(YarnNetUtil.class);
     private static RestTemplate restTemplate = new RestTemplate();
 
     /**
      * delete app task scheduling by yarn.
      *
-     * @param url prefix part of whole url
+     * @param url   prefix part of whole url
      * @param appId application id
      */
     public static void delete(String url, String appId) {
@@ -48,12 +49,12 @@
             if (appId != null) {
                 LOGGER.info("{} will delete by yarn", appId);
                 restTemplate.put(url + "ws/v1/cluster/apps/"
-                                + appId + "/state",
-                        "{\"state\": \"KILLED\"}");
+                        + appId + "/state",
+                    "{\"state\": \"KILLED\"}");
             }
         } catch (HttpClientErrorException e) {
             LOGGER.warn("client error {} from yarn: {}",
-                    e.getMessage(), e.getResponseBodyAsString());
+                e.getMessage(), e.getResponseBodyAsString());
         } catch (Exception e) {
             LOGGER.error("delete exception happens by yarn. {}", e);
         }
@@ -62,7 +63,7 @@
     /**
      * update app task scheduling by yarn.
      *
-     * @param url prefix part of whole url
+     * @param url      prefix part of whole url
      * @param instance job instance
      * @return
      */
@@ -77,7 +78,7 @@
             return true;
         } catch (HttpClientErrorException e) {
             LOGGER.warn("client error {} from yarn: {}",
-                    e.getMessage(), e.getResponseBodyAsString());
+                e.getMessage(), e.getResponseBodyAsString());
             if (e.getStatusCode() == HttpStatus.NOT_FOUND) {
                 // in sync with Livy behavior, see com.cloudera.livy.utils.SparkYarnApp
                 instance.setState(DEAD);
diff --git a/service/src/main/resources/Init_quartz_postgres.sql b/service/src/main/resources/Init_quartz_postgres.sql
index fb6e813..b5a930c 100644
--- a/service/src/main/resources/Init_quartz_postgres.sql
+++ b/service/src/main/resources/Init_quartz_postgres.sql
@@ -19,17 +19,17 @@
 -- In your Quartz properties file, you'll need to set
 -- org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.PostgreSQLDelegate
 
-drop table qrtz_fired_triggers;
-DROP TABLE QRTZ_PAUSED_TRIGGER_GRPS;
-DROP TABLE QRTZ_SCHEDULER_STATE;
-DROP TABLE QRTZ_LOCKS;
-drop table qrtz_simple_triggers;
-drop table qrtz_cron_triggers;
-drop table qrtz_simprop_triggers;
-DROP TABLE QRTZ_BLOB_TRIGGERS;
-drop table qrtz_triggers;
-drop table qrtz_job_details;
-drop table qrtz_calendars;
+DROP TABLE IF EXISTS qrtz_fired_triggers;
+DROP TABLE IF EXISTS qrtz_paused_trigger_grps;
+DROP TABLE IF EXISTS qrtz_scheduler_state;
+DROP TABLE IF EXISTS qrtz_locks;
+DROP TABLE IF EXISTS qrtz_simple_triggers;
+DROP TABLE IF EXISTS qrtz_cron_triggers;
+DROP TABLE IF EXISTS qrtz_simprop_triggers;
+DROP TABLE IF EXISTS qrtz_blob_triggers;
+DROP TABLE IF EXISTS qrtz_triggers;
+DROP TABLE IF EXISTS qrtz_job_details;
+DROP TABLE IF EXISTS qrtz_calendars;
 
 CREATE TABLE qrtz_job_details
   (
diff --git a/service/src/main/resources/application-dev.properties b/service/src/main/resources/application-dev.properties
index 544dbb4..14bd198 100644
--- a/service/src/main/resources/application-dev.properties
+++ b/service/src/main/resources/application-dev.properties
@@ -26,3 +26,5 @@
 # enable h2 console, default path: http://localhost:8080/h2-console/
 spring.h2.console.enabled=true
 spring.jpa.show-sql=true
+
+logging.file=logs/griffin-service.log
\ No newline at end of file
diff --git a/service/src/main/resources/application-docker.properties b/service/src/main/resources/application-docker.properties
index e07979b..2a0bf2b 100644
--- a/service/src/main/resources/application-docker.properties
+++ b/service/src/main/resources/application-docker.properties
@@ -74,6 +74,11 @@
 
 # livy
 livy.uri=http://10.148.215.23:38998/batches
-
+livy.need.queue=false
+livy.task.max.concurrent.count=20
+livy.task.submit.interval.second=3
+livy.task.appId.retry.count=3
 # yarn url
 yarn.uri=http://10.148.215.23:38088
+
+logging.file=logs/griffin-service.log
\ No newline at end of file
diff --git a/service/src/main/resources/application-mysql.properties b/service/src/main/resources/application-mysql.properties
index 0b124ef..e80534b 100644
--- a/service/src/main/resources/application-mysql.properties
+++ b/service/src/main/resources/application-mysql.properties
@@ -25,3 +25,5 @@
 spring.datasource.driver-class-name=com.mysql.jdbc.Driver
 spring.jpa.show-sql=true
 spring.jpa.hibernate.ddl-auto=update
+
+logging.file=logs/griffin-service.log
\ No newline at end of file
diff --git a/service/src/main/resources/application-prod.properties b/service/src/main/resources/application-prod.properties
index 199a5a3..29afae5 100644
--- a/service/src/main/resources/application-prod.properties
+++ b/service/src/main/resources/application-prod.properties
@@ -60,5 +60,11 @@
 # elasticsearch.password = password
 # livy
 livy.uri=http://localhost:8998/batches
+livy.need.queue=false
+livy.task.max.concurrent.count=20
+livy.task.submit.interval.second=3
+livy.task.appId.retry.count=3
 # yarn url
-yarn.uri=http://localhost:8088
\ No newline at end of file
+yarn.uri=http://localhost:8088
+
+logging.file=logs/griffin-service.log
\ No newline at end of file
diff --git a/service/src/main/resources/application.properties b/service/src/main/resources/application.properties
index 1c26319..ccef9dd 100644
--- a/service/src/main/resources/application.properties
+++ b/service/src/main/resources/application.properties
@@ -27,6 +27,12 @@
 hive.metastore.dbname=default
 hive.hmshandler.retry.attempts=15
 hive.hmshandler.retry.interval=2000ms
+#Hive jdbc
+hive.jdbc.className=org.apache.hive.jdbc.HiveDriver
+hive.jdbc.url=jdbc:hive2://localhost:10000/
+hive.need.kerberos=false
+hive.keytab.user=xxx@xx.com
+hive.keytab.path=/path/to/keytab/file
 # Hive cache time
 cache.evict.hive.fixedRate.in.milliseconds=900000
 # Kafka schema registry
@@ -60,7 +66,16 @@
 # elasticsearch.password = password
 # livy
 livy.uri=http://localhost:8998/batches
+livy.need.queue=false
+livy.task.max.concurrent.count=20
+livy.task.submit.interval.second=3
+livy.task.appId.retry.count=3
+livy.need.kerberos=false
+livy.server.auth.kerberos.principal=livy/kerberos.principal
+livy.server.auth.kerberos.keytab=/path/to/livy/keytab/file
 # yarn url
 yarn.uri=http://localhost:8088
 # griffin event listener
 internal.event.listeners=GriffinJobEventHook
+
+logging.file=logs/griffin-service.log
\ No newline at end of file
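The hive.jdbc.*, Kerberos, and livy.task.* keys added above are ordinary Spring properties. A minimal, hypothetical binding sketch follows; the class and field names are illustrative only and not part of the patch, and the comments are inferred from the property names rather than from the implementation.

```java
// Illustrative only -- a hypothetical binding of the new keys added above.
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

@Configuration
public class LivySubmitSettingsSketch {

    @Value("${livy.need.queue:false}")
    private boolean needQueue;              // whether to queue Livy submissions (inferred from key name)

    @Value("${livy.task.max.concurrent.count:20}")
    private int maxConcurrentTasks;         // cap on concurrently submitted batches (inferred)

    @Value("${livy.task.submit.interval.second:3}")
    private int submitIntervalSeconds;      // pause between queued submissions (inferred)

    @Value("${livy.task.appId.retry.count:3}")
    private int appIdRetryCount;            // retries while waiting for the YARN application id (inferred)

    @Value("${hive.jdbc.url:jdbc:hive2://localhost:10000/}")
    private String hiveJdbcUrl;             // consumed by the JDBC-based Hive metastore path added in this patch
}
```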
diff --git a/service/src/main/resources/banner.txt b/service/src/main/resources/banner.txt
index a29770f..4b81cb9 100644
--- a/service/src/main/resources/banner.txt
+++ b/service/src/main/resources/banner.txt
@@ -2,4 +2,4 @@
 __  ____/___  __ \____  _/___  ____/___  ____/____  _/___  | / /
 _  / __  __  /_/ / __  /  __  /_    __  /_     __  /  __   |/ /
 / /_/ /  _  _, _/ __/ /   _  __/    _  __/    __/ /   _  /|  /
-\____/   /_/ |_|  /___/   /_/       /_/       /___/   /_/ |_/   version: 0.4.0
+\____/   /_/ |_|  /___/   /_/       /_/       /___/   /_/ |_/   version: 0.5.0
diff --git a/service/src/main/resources/log4j2-spring.xml b/service/src/main/resources/log4j2-spring.xml
new file mode 100644
index 0000000..c021b4a
--- /dev/null
+++ b/service/src/main/resources/log4j2-spring.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Configuration status="WARN">
+    <Properties>
+        <Property name="PID">????</Property>
+        <Property name="LOG_EXCEPTION_CONVERSION_WORD">%xwEx</Property>
+        <Property name="LOG_LEVEL_PATTERN">%5p</Property>
+        <Property name="CONSOLE_LOG_PATTERN">%clr{%d{yyyy-MM-dd HH:mm:ss.SSS}}{faint} %clr{${LOG_LEVEL_PATTERN}} %clr{${sys:PID}}{magenta} %clr{---}{faint} %clr{[%15.15t]}{faint} %clr{%-40.40c{1.}[%L]}{cyan}  %clr{:}{faint} %m%n${sys:LOG_EXCEPTION_CONVERSION_WORD}</Property>
+        <Property name="FILE_LOG_PATTERN">%d{yyyy-MM-dd HH:mm:ss.SSS} ${LOG_LEVEL_PATTERN} ${sys:PID} --- [%t] %-40.40c{1.}[%L] : %m%n${sys:LOG_EXCEPTION_CONVERSION_WORD}</Property>
+    </Properties>
+    <Appenders>
+        <Console name="Console" target="SYSTEM_OUT" follow="true">
+            <PatternLayout pattern="${sys:CONSOLE_LOG_PATTERN}" />
+        </Console>
+        <RollingFile name="File" fileName="${sys:LOG_FILE}"
+                     filePattern="logs/$${date:yyyy-MM}/application-%d{yyyy-MM-dd-HH}-%i.log.gz">
+            <PatternLayout>
+                <Pattern>${sys:FILE_LOG_PATTERN}</Pattern>
+            </PatternLayout>
+            <Policies>
+                <SizeBasedTriggeringPolicy size="10 MB" />
+            </Policies>
+            <DefaultRollOverStrategy max = "10"/>
+        </RollingFile>
+    </Appenders>
+    <Loggers>
+        <Logger name="org.apache.catalina.startup.DigesterFactory" level="error" />
+        <Logger name="org.apache.catalina.util.LifecycleBase" level="error" />
+        <Logger name="org.apache.coyote.http11.Http11NioProtocol" level="warn" />
+        <logger name="org.apache.sshd.common.util.SecurityUtils" level="warn"/>
+        <Logger name="org.apache.tomcat.util.net.NioSelectorPool" level="warn" />
+        <Logger name="org.crsh.plugin" level="warn" />
+        <logger name="org.crsh.ssh" level="warn"/>
+        <Logger name="org.eclipse.jetty.util.component.AbstractLifeCycle" level="error" />
+        <Logger name="org.hibernate.validator.internal.util.Version" level="warn" />
+        <logger name="org.springframework.boot.actuate.autoconfigure.CrshAutoConfiguration" level="warn"/>
+        <logger name="org.springframework.boot.actuate.endpoint.jmx" level="warn"/>
+        <logger name="org.thymeleaf" level="warn"/>
+        <Root level="info">
+            <AppenderRef ref="Console" />
+            <AppenderRef ref="File" />
+        </Root>
+    </Loggers>
+</Configuration>
diff --git a/service/src/test/java/org/apache/griffin/core/info/GriffinInfoControllerTest.java b/service/src/test/java/org/apache/griffin/core/info/GriffinInfoControllerTest.java
index e6e1941..90e3f3b 100644
--- a/service/src/test/java/org/apache/griffin/core/info/GriffinInfoControllerTest.java
+++ b/service/src/test/java/org/apache/griffin/core/info/GriffinInfoControllerTest.java
@@ -43,6 +43,6 @@
     public void testGreeting() throws Exception {
         mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/version"))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$", is("0.4.0")));
+                .andExpect(jsonPath("$", is("0.5.0")));
     }
 }
diff --git a/service/src/test/java/org/apache/griffin/core/job/JobControllerTest.java b/service/src/test/java/org/apache/griffin/core/job/JobControllerTest.java
index 0bd74e6..6f94e0a 100644
--- a/service/src/test/java/org/apache/griffin/core/job/JobControllerTest.java
+++ b/service/src/test/java/org/apache/griffin/core/job/JobControllerTest.java
@@ -19,15 +19,18 @@
 
 package org.apache.griffin.core.job;
 
+import static org.apache.griffin.core.exception.GriffinExceptionMessage.INSTANCE_ID_DOES_NOT_EXIST;
 import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_ID_DOES_NOT_EXIST;
 import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_NAME_DOES_NOT_EXIST;
 import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinJob;
+import static org.apache.griffin.core.util.EntityMocksHelper.createJobInstance;
 import static org.hamcrest.CoreMatchers.is;
 import static org.mockito.BDDMockito.given;
 import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.doThrow;
 import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
 import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
 import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
 import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 
@@ -36,6 +39,7 @@
 
 import org.apache.griffin.core.exception.GriffinException;
 import org.apache.griffin.core.exception.GriffinExceptionHandler;
+import org.apache.griffin.core.exception.GriffinExceptionMessage;
 import org.apache.griffin.core.job.entity.AbstractJob;
 import org.apache.griffin.core.job.entity.JobHealth;
 import org.apache.griffin.core.job.entity.JobInstanceBean;
@@ -163,6 +167,37 @@
     }
 
     @Test
+    public void testFindInstance() throws Exception {
+        JobInstanceBean jobInstance = new JobInstanceBean(1L, LivySessionStates
+                .State.RUNNING, "", "", null, null);
+        given(service.findInstance(1L)).willReturn(jobInstance);
+
+        mvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs/instances/1"))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.state", is("RUNNING")));
+    }
+
+    @Test
+    public void testFindInstanceForFailureWithNotFound() throws Exception {
+        Long id = 1L;
+        doThrow(new GriffinException.NotFoundException(INSTANCE_ID_DOES_NOT_EXIST))
+            .when(service).findInstance(id);
+
+        mvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs/instances/1"))
+           .andExpect(status().isNotFound());
+    }
+
+    @Test
+    public void testJobInstanceWithGivenIdNotFound() throws Exception {
+        Long jobInstanceId = 2L;
+        doThrow(new GriffinException.NotFoundException(GriffinExceptionMessage.JOB_INSTANCE_NOT_FOUND))
+                .when(service).findInstance(jobInstanceId);
+
+        mvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs/instances/2"))
+                .andExpect(status().isNotFound());
+    }
+
+    @Test
     public void testGetHealthInfo() throws Exception {
         JobHealth jobHealth = new JobHealth(1, 3);
         given(service.getHealthInfo()).willReturn(jobHealth);
@@ -171,4 +206,23 @@
                 .andExpect(status().isOk())
                 .andExpect(jsonPath("$.healthyJobCount", is(1)));
     }
+
+    @Test
+    public void testTriggerJobForSuccess() throws Exception {
+        Long id = 1L;
+        given(service.triggerJobById(id)).willReturn(null);
+
+        mvc.perform(post(URLHelper.API_VERSION_PATH + "/jobs/trigger/1"))
+                .andExpect(status().isOk());
+    }
+
+    @Test
+    public void testTriggerJobForFailureWithException() throws Exception {
+        doThrow(new GriffinException.ServiceException("Failed to trigger job",
+                new Exception()))
+                .when(service).triggerJobById(1L);
+
+        mvc.perform(post(URLHelper.API_VERSION_PATH + "/jobs/trigger/1"))
+                .andExpect(status().isInternalServerError());
+    }
 }
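The controller tests above exercise two new endpoints: GET .../jobs/instances/{id} for a single job instance and POST .../jobs/trigger/{id} for triggering a job on demand. The sketch below shows a client-side call with Spring's RestTemplate; the base URL and the "/api/v1" prefix are assumptions for illustration (the real prefix comes from URLHelper.API_VERSION_PATH), and the string response body follows the trigger-key result seen in JobServiceImplTest further down.

```java
// Hedged client-side sketch of the new endpoints; URLs are assumptions.
import org.springframework.web.client.RestTemplate;

public class TriggerJobClientSketch {
    public static void main(String[] args) {
        RestTemplate rest = new RestTemplate();
        String base = "http://localhost:8080/api/v1";   // assumed deployment URL and API prefix
        long jobId = 1L;

        // POST /jobs/trigger/{id}: trigger the job now; the service returns a trigger key string.
        String triggerKey = rest.postForObject(base + "/jobs/trigger/" + jobId, null, String.class);
        System.out.println("trigger key: " + triggerKey);

        // GET /jobs/instances/{instanceId}: fetch one job instance (state, timestamps, appId).
        Object instance = rest.getForObject(base + "/jobs/instances/" + jobId, Object.class);
        System.out.println("instance: " + instance);
    }
}
```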
diff --git a/service/src/test/java/org/apache/griffin/core/job/JobInstanceTest.java b/service/src/test/java/org/apache/griffin/core/job/JobInstanceTest.java
index 8e618de..af74600 100644
--- a/service/src/test/java/org/apache/griffin/core/job/JobInstanceTest.java
+++ b/service/src/test/java/org/apache/griffin/core/job/JobInstanceTest.java
@@ -30,6 +30,7 @@
 import static org.mockito.Mockito.verify;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Properties;
 
@@ -51,6 +52,7 @@
 import org.quartz.Scheduler;
 import org.quartz.Trigger;
 import org.quartz.TriggerKey;
+import org.quartz.impl.triggers.AbstractTrigger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.boot.test.context.TestConfiguration;
@@ -115,6 +117,7 @@
         JobDetail jd = createJobDetail(JsonUtil.toJson(measure), "");
         BatchJob job = new BatchJob(1L, "jobName",
                 "qName", "qGroup", false);
+        job.setConfigMap(new HashMap<>());
         List<Trigger> triggers = Arrays.asList(createSimpleTrigger(2, 0));
         given(context.getJobDetail()).willReturn(jd);
         given(measureRepo.findOne(Matchers.anyLong())).willReturn(measure);
@@ -127,6 +130,9 @@
         given(jobRepo.save(Matchers.any(BatchJob.class))).willReturn(job);
         given(scheduler.checkExists(Matchers.any(JobKey.class)))
                 .willReturn(false);
+        Trigger trigger = mock(Trigger.class);
+        given(context.getTrigger()).willReturn(trigger);
+        given(trigger.getKey()).willReturn(new TriggerKey("test"));
         jobInstance.execute(context);
 
         verify(measureRepo, times(1)).findOne(Matchers.anyLong());
diff --git a/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java
new file mode 100644
index 0000000..d48b824
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java
@@ -0,0 +1,91 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.job;
+
+import org.apache.griffin.core.exception.GriffinException;
+import org.apache.griffin.core.job.entity.AbstractJob;
+import org.apache.griffin.core.job.entity.JobInstanceBean;
+import org.apache.griffin.core.job.repo.JobInstanceRepo;
+import org.apache.griffin.core.job.repo.JobRepo;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.quartz.*;
+import org.springframework.scheduling.quartz.SchedulerFactoryBean;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import java.util.Collections;
+
+import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinJob;
+import static org.apache.griffin.core.util.EntityMocksHelper.createJobInstance;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.BDDMockito.given;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.internal.verification.VerificationModeFactory.times;
+
+@RunWith(SpringRunner.class)
+public class JobServiceImplTest {
+
+    @Mock
+    private JobRepo<AbstractJob> jobRepo;
+
+    @Mock
+    private SchedulerFactoryBean factory;
+
+    @Mock
+    private JobInstanceRepo instanceRepo;
+
+    @InjectMocks
+    private JobServiceImpl jobService;
+
+
+    @Test
+    public void testTriggerJobById() throws SchedulerException {
+        Long jobId = 1L;
+        AbstractJob job = createGriffinJob();
+        given(jobRepo.findByIdAndDeleted(jobId,false)).willReturn(job);
+        Scheduler scheduler = mock(Scheduler.class);
+        given(scheduler.checkExists(any(JobKey.class))).willReturn(true);
+        ListenerManager listenerManager = mock(ListenerManager.class);
+        given(scheduler.getListenerManager()).willReturn(listenerManager);
+        given(factory.getScheduler()).willReturn(scheduler);
+        JobInstanceBean jobInstanceBean = createJobInstance();
+        given(instanceRepo.findByTriggerKey(anyString())).willReturn(Collections.singletonList(jobInstanceBean));
+
+        String result = jobService.triggerJobById(jobId);
+
+        assertTrue(result.matches("DEFAULT\\.[0-9a-f\\-]{49}"));
+        verify(scheduler, times(1)).scheduleJob(any());
+    }
+
+
+    @Test(expected = GriffinException.NotFoundException.class)
+    public void testTriggerJobByIdFail() throws SchedulerException {
+        Long jobId = 1L;
+        given(jobRepo.findByIdAndDeleted(jobId,false)).willReturn(null);
+        jobService.triggerJobById(jobId);
+    }
+}
diff --git a/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java b/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
index e9a481e..543432e 100644
--- a/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
+++ b/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
@@ -19,19 +19,17 @@
 
 package org.apache.griffin.core.job;
 
-import static org.apache.griffin.core.util.EntityMocksHelper.createFileExistPredicate;
-import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinMeasure;
-import static org.apache.griffin.core.util.EntityMocksHelper.createJobDetail;
-import static org.apache.griffin.core.util.EntityMocksHelper.createJobInstance;
-import static org.apache.griffin.core.util.EntityMocksHelper.createSimpleTrigger;
+import static org.apache.griffin.core.util.EntityMocksHelper.*;
 import static org.mockito.BDDMockito.given;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Properties;
 
+import org.apache.griffin.core.config.PropertiesConfig;
 import org.apache.griffin.core.job.entity.JobInstanceBean;
 import org.apache.griffin.core.job.entity.SegmentPredicate;
 import org.apache.griffin.core.job.repo.JobInstanceRepo;
@@ -53,7 +51,6 @@
 import org.springframework.test.context.junit4.SpringRunner;
 import org.springframework.web.client.RestTemplate;
 
-
 @RunWith(SpringRunner.class)
 public class SparkSubmitJobTest {
 
@@ -70,7 +67,10 @@
             return PropertiesUtil.getProperties(path,
                     new ClassPathResource(path));
         }
-
+        @Bean
+        public PropertiesConfig sparkConf() {
+            return new PropertiesConfig("src/test/resources", null);
+        }
     }
 
     @Autowired
@@ -88,6 +88,8 @@
     @MockBean
     private BatchJobOperatorImpl batchJobOp;
 
+    @MockBean
+    private LivyTaskSubmitHelper livyTaskSubmitHelper;
 
     @Before
     public void setUp() {
@@ -148,9 +150,9 @@
         given(context.getJobDetail()).willReturn(jd);
         given(jobInstanceRepo.findByPredicateName(Matchers.anyString()))
                 .willReturn(instance);
-        Whitebox.setInternalState(sparkSubmitJob, "restTemplate", restTemplate);
-        given(restTemplate.postForObject(Matchers.anyString(), Matchers.any(),
-                Matchers.any())).willReturn(result);
+//        Whitebox.setInternalState(sparkSubmitJob, "restTemplate", restTemplate);
+//        given(restTemplate.postForObject(Matchers.anyString(), Matchers.any(),
+//                Matchers.any())).willReturn(result);
 
         sparkSubmitJob.execute(context);
 
@@ -183,4 +185,25 @@
         sparkSubmitJob.execute(context);
     }
 
+    @Test
+    public void testMultiplePredicatesWhichReturnsTrue() throws Exception {
+        JobExecutionContext context = mock(JobExecutionContext.class);
+        JobInstanceBean instance = createJobInstance();
+        GriffinMeasure measure = createGriffinMeasure("measureName");
+        SegmentPredicate predicate = createMockPredicate();
+        SegmentPredicate secondPredicate = createMockPredicate();
+        JobDetail jd = createJobDetail(JsonUtil.toJson(measure), JsonUtil.toJson
+                (Arrays.asList(predicate, secondPredicate)));
+        given(context.getJobDetail()).willReturn(jd);
+        given(context.getTrigger()).willReturn(createSimpleTrigger(4, 5));
+        given(jobInstanceRepo.findByPredicateName(Matchers.anyString()))
+                .willReturn(instance);
+        sparkSubmitJob.execute(context);
+
+        verify(context, times(1)).getJobDetail();
+        verify(jobInstanceRepo, times(1)).findByPredicateName(
+                Matchers.anyString());
+        verify(jobInstanceRepo, times(1)).save(instance);
+    }
+
 }
diff --git a/service/src/test/java/org/apache/griffin/core/job/factory/PredicatorFactoryTest.java b/service/src/test/java/org/apache/griffin/core/job/factory/PredicatorFactoryTest.java
new file mode 100644
index 0000000..b289e24
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/job/factory/PredicatorFactoryTest.java
@@ -0,0 +1,66 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.job.factory;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.griffin.core.exception.GriffinException;
+import org.apache.griffin.core.job.FileExistPredicator;
+import org.apache.griffin.core.job.Predicator;
+import org.apache.griffin.core.job.entity.SegmentPredicate;
+import org.apache.griffin.core.util.PredicatorMock;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import java.io.IOException;
+import java.util.HashMap;
+
+import static org.apache.griffin.core.util.EntityMocksHelper.createFileExistPredicate;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+@RunWith(SpringRunner.class)
+public class PredicatorFactoryTest {
+
+    @Test
+    public void testFileExistPredicatorCreation() throws IOException {
+        Predicator predicator = PredicatorFactory.newPredicateInstance(createFileExistPredicate());
+        assertNotNull(predicator);
+        assertTrue(predicator instanceof FileExistPredicator);
+    }
+
+    @Test(expected = GriffinException.NotFoundException.class)
+    public void testUnknownPredicator() throws JsonProcessingException {
+        PredicatorFactory.newPredicateInstance(
+                new SegmentPredicate("unknown", null));
+    }
+
+    @Test
+    public void testPluggablePredicator() throws JsonProcessingException {
+        String predicatorClass = "org.apache.griffin.core.util.PredicatorMock";
+        HashMap<String, Object> map = new HashMap<>();
+        map.put("class", predicatorClass);
+        SegmentPredicate segmentPredicate = new SegmentPredicate("custom", null);
+        segmentPredicate.setConfigMap(map);
+        Predicator predicator = PredicatorFactory.newPredicateInstance(segmentPredicate);
+        assertNotNull(predicator);
+        assertTrue(predicator instanceof PredicatorMock);
+    }
+}
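The pluggable-predicator test above registers org.apache.griffin.core.util.PredicatorMock under a "class" key in the predicate's config map. The patch's actual PredicatorFactory is not shown in this hunk, so the following is only a hedged sketch of the reflective lookup the test implies; the getType()/getConfigMap() accessor names and the FileExistPredicator(SegmentPredicate) constructor are assumptions drawn from the test code, not confirmed signatures.

```java
// Hedged sketch only -- not the patch's PredicatorFactory. It mirrors what the tests imply:
// "file.exist" maps to FileExistPredicator, and a "class" entry in the config map names a
// custom Predicator with a (SegmentPredicate) constructor, e.g. PredicatorMock.
import org.apache.griffin.core.job.FileExistPredicator;
import org.apache.griffin.core.job.Predicator;
import org.apache.griffin.core.job.entity.SegmentPredicate;

public final class PredicatorFactorySketch {
    public static Predicator newPredicateInstance(SegmentPredicate sp) {
        if ("file.exist".equals(sp.getType())) {               // getType() is an assumed accessor
            return new FileExistPredicator(sp);                // constructor signature assumed
        }
        Object className = sp.getConfigMap() == null
                ? null : sp.getConfigMap().get("class");       // "class" key as used in the test
        if (className == null) {
            throw new IllegalArgumentException("Unknown predicate type: " + sp.getType());
        }
        try {
            return (Predicator) Class.forName(className.toString())
                    .getConstructor(SegmentPredicate.class)
                    .newInstance(sp);
        } catch (ReflectiveOperationException e) {
            throw new IllegalArgumentException("Cannot instantiate predicator " + className, e);
        }
    }
}
```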
diff --git a/service/src/test/java/org/apache/griffin/core/job/repo/JobInstanceRepoTest.java b/service/src/test/java/org/apache/griffin/core/job/repo/JobInstanceRepoTest.java
index 76b81df..508a663 100644
--- a/service/src/test/java/org/apache/griffin/core/job/repo/JobInstanceRepoTest.java
+++ b/service/src/test/java/org/apache/griffin/core/job/repo/JobInstanceRepoTest.java
@@ -31,8 +31,10 @@
 import static org.apache.griffin.core.job.entity.LivySessionStates.State.SUCCESS;
 import static org.assertj.core.api.Assertions.assertThat;
 
+import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.avro.generic.GenericData;
 import org.apache.griffin.core.config.EclipseLinkJpaConfigForTest;
 import org.apache.griffin.core.job.entity.BatchJob;
 import org.apache.griffin.core.job.entity.JobInstanceBean;
@@ -57,6 +59,8 @@
     @Autowired
     private JobInstanceRepo jobInstanceRepo;
 
+    private List<Long> entityIds;
+
     @Before
     public void setup() {
         entityManager.clear();
@@ -79,6 +83,12 @@
     }
 
     @Test
+    public void testFindByInstanceId() {
+        JobInstanceBean bean = jobInstanceRepo.findByInstanceId(entityIds.get(0));
+        assertThat(bean).isNotNull();
+    }
+
+    @Test
     public void testFindByExpireTmsLessThanEqual() {
         List<JobInstanceBean> beans = jobInstanceRepo
                 .findByExpireTmsLessThanEqual(1516004640092L);
@@ -128,5 +138,10 @@
         entityManager.persistAndFlush(bean2);
         entityManager.persistAndFlush(bean3);
         entityManager.persistAndFlush(bean4);
+        entityIds = new ArrayList<>();
+        entityIds.add(bean1.getId());
+        entityIds.add(bean2.getId());
+        entityIds.add(bean3.getId());
+        entityIds.add(bean4.getId());
     }
 }
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
index e78bf3f..df98423 100644
--- a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
+++ b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
@@ -19,31 +19,26 @@
 
 package org.apache.griffin.core.metastore.hive;
 
-import static org.hamcrest.Matchers.hasSize;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.nullValue;
-import static org.mockito.BDDMockito.given;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
 import org.apache.griffin.core.util.URLHelper;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
 import org.springframework.boot.test.mock.mockito.MockBean;
 import org.springframework.test.context.junit4.SpringRunner;
 import org.springframework.test.web.servlet.MockMvc;
 
+import java.util.*;
+
+import static org.hamcrest.Matchers.*;
+import static org.mockito.BDDMockito.given;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
 @RunWith(SpringRunner.class)
 @WebMvcTest(value = HiveMetaStoreController.class, secure = false)
 public class HiveMetaStoreControllerTest {
@@ -52,6 +47,7 @@
     private MockMvc mockMvc;
 
     @MockBean
+    @Qualifier(value = "metastoreSvc")
     private HiveMetaStoreService hiveMetaStoreService;
 
 
@@ -117,7 +113,7 @@
         String tableName = "table";
         given(hiveMetaStoreService.getTable(dbName, tableName)).willReturn(
                 new Table(tableName, null, null, 0, 0, 0, null, null,
-                null, null, null, null));
+                        null, null, null, null));
 
         mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/metadata/hive/table")
                 .param("db", dbName).param("table",
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
index a24f9b3..113ebf2 100644
--- a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
+++ b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
@@ -19,41 +19,42 @@
 
 package org.apache.griffin.core.metastore.hive;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.BDDMockito.given;
-import static org.mockito.Mockito.*;
-
-import java.util.Arrays;
-import java.util.List;
-
 import org.apache.griffin.core.config.CacheConfig;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.thrift.TException;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.boot.test.mock.mockito.MockBean;
 import org.springframework.cache.CacheManager;
 import org.springframework.cache.annotation.EnableCaching;
 import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
 import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.junit4.SpringRunner;
 
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.BDDMockito.given;
+import static org.mockito.Mockito.*;
+
 
 @RunWith(SpringRunner.class)
+@ContextConfiguration
 public class HiveMetaStoreServiceImplTest {
 
-    @TestConfiguration
+    @Configuration
     @EnableCaching
     public static class HiveMetaStoreServiceConfiguration extends CacheConfig {
         @Bean("hiveMetaStoreServiceImpl")
-        public HiveMetaStoreService service() {
+        public HiveMetaStoreServiceImpl service() {
             return new HiveMetaStoreServiceImpl();
         }
 
@@ -78,13 +79,13 @@
     }
 
     @Test
-    public void testGetAllDatabasesForNormalRun() throws MetaException {
+    public void testGetAllDatabasesForNormalRun() throws TException {
         given(client.getAllDatabases()).willReturn(Arrays.asList("default"));
         assertEquals(service.getAllDatabases().iterator().hasNext(), true);
     }
 
     @Test
-    public void testGetAllDatabasesForMetaException() throws MetaException {
+    public void testGetAllDatabasesForMetaException() throws TException {
         given(client.getAllDatabases()).willThrow(MetaException.class);
         doNothing().when(client).reconnect();
         assertTrue(service.getAllDatabases() == null);
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceJDBCImplTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceJDBCImplTest.java
new file mode 100644
index 0000000..faad170
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceJDBCImplTest.java
@@ -0,0 +1,147 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+
+import org.apache.griffin.core.config.CacheConfig;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.cache.CacheManager;
+import org.springframework.cache.annotation.EnableCaching;
+import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
+import org.springframework.context.annotation.Bean;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.List;
+
+import static org.mockito.Matchers.anyInt;
+import static org.mockito.Matchers.anyString;
+import static org.powermock.api.mockito.PowerMockito.when;
+
+
+@RunWith(SpringRunner.class)
+public class HiveMetastoreServiceJDBCImplTest {
+
+    @TestConfiguration
+    @EnableCaching
+    public static class HiveMetaStoreServiceConfiguration extends CacheConfig {
+        @Bean("hiveMetaStoreServiceJdbcImpl")
+        public HiveMetaStoreServiceJdbcImpl serviceJDBC() {
+            return new HiveMetaStoreServiceJdbcImpl();
+        }
+
+        @Bean
+        CacheManager cacheManager() {
+            return new ConcurrentMapCacheManager("jdbcHive");
+        }
+    }
+
+    private HiveMetaStoreServiceJdbcImpl serviceJdbc = new HiveMetaStoreServiceJdbcImpl();
+
+    @Mock
+    private Connection conn;
+
+    @Mock
+    private Statement stmt;
+
+    @Mock
+    private ResultSet rs;
+
+    @Before
+    public void setUp() throws SQLException {
+        serviceJdbc.setConn(conn);
+        serviceJdbc.setHiveClassName("org.apache.hive.jdbc.HiveDriver");
+        serviceJdbc.setNeedKerberos("true");
+        serviceJdbc.setKeytabPath("/path/to/keytab");
+        serviceJdbc.setKeytabUser("user");
+    }
+
+    @Test
+    public void testGetComment() {
+        String colStr = "`session_date` string COMMENT 'this is session date'";
+        String comment = serviceJdbc.getComment(colStr);
+        assert (comment.equals("this is session date"));
+
+        colStr = "`session_date` string COMMENT ''";
+        comment = serviceJdbc.getComment(colStr);
+        Assert.assertTrue(comment.isEmpty());
+    }
+
+    @Test
+    public void testgetAllDatabases() throws SQLException {
+        when(conn.createStatement()).thenReturn(stmt);
+        when(stmt.executeQuery(anyString())).thenReturn(rs);
+        when(rs.next()).thenReturn(true).thenReturn(false);
+        when(rs.getString(anyInt())).thenReturn("default");
+
+        Iterable<String> res = serviceJdbc.getAllDatabases();
+        for (String s : res) {
+            Assert.assertEquals(s, "default");
+            break;
+        }
+    }
+
+    @Test
+    public void testGetAllTableNames() throws SQLException {
+        when(conn.createStatement()).thenReturn(stmt);
+        when(stmt.executeQuery(anyString())).thenReturn(rs);
+        when(rs.next()).thenReturn(true).thenReturn(true).thenReturn(false);
+        when(rs.getString(anyInt())).thenReturn("session_data").thenReturn("session_summary");
+
+        Iterable<String> res = serviceJdbc.getAllTableNames("default");
+        StringBuilder sb = new StringBuilder();
+        for (String s : res) {
+            sb.append(s).append(",");
+        }
+        Assert.assertEquals(sb.toString(), "session_data,session_summary,");
+    }
+
+    @Test
+    public void testGetTable() throws SQLException {
+        String meta = "CREATE EXTERNAL TABLE `default.session_data`(  `session_date` string COMMENT 'this is session date',   `site_id` int COMMENT '',   `guid` string COMMENT '',   `user_id` string COMMENT '')COMMENT 'session_data for session team' PARTITIONED BY (   `dt` string,   `place` int) ROW FORMAT SERDE   'org.apache.hadoop.hive.serde2.avro.AvroSerDe' STORED AS INPUTFORMAT   'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' OUTPUTFORMAT   'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' LOCATION 'hdfs://localhost/session/common/session_data'TBLPROPERTIES (  'COLUMN_STATS_ACCURATE'='false',   'avro.schema.url'='hdfs://localhost/griffin/session/avro/session-data-1.0.avsc',   'transient_lastDdlTime'='1535651637')";
+        when(conn.createStatement()).thenReturn(stmt);
+        when(stmt.executeQuery(anyString())).thenReturn(rs);
+        when(rs.next()).thenReturn(true).thenReturn(false);
+        when(rs.getString(anyInt())).thenReturn(meta);
+
+        Table res = serviceJdbc.getTable("default", "session_data");
+
+        assert (res.getDbName().equals("default"));
+        assert (res.getTableName().equals("session_data"));
+        assert (res.getSd().getLocation().equals("hdfs://localhost/session/common/session_data"));
+        List<FieldSchema> fieldSchemas = res.getSd().getCols();
+        for (FieldSchema fieldSchema : fieldSchemas) {
+            Assert.assertEquals(fieldSchema.getName(),"session_date");
+            Assert.assertEquals(fieldSchema.getType(),"string");
+            Assert.assertEquals(fieldSchema.getComment(),"this is session date");
+            break;
+        }
+    }
+}
\ No newline at end of file
diff --git a/service/src/test/java/org/apache/griffin/core/metric/MetricServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/metric/MetricServiceImplTest.java
index aa55f7a..39837f0 100644
--- a/service/src/test/java/org/apache/griffin/core/metric/MetricServiceImplTest.java
+++ b/service/src/test/java/org/apache/griffin/core/metric/MetricServiceImplTest.java
@@ -33,6 +33,9 @@
 
 import org.apache.griffin.core.exception.GriffinException;
 import org.apache.griffin.core.job.entity.AbstractJob;
+import org.apache.griffin.core.job.entity.JobInstanceBean;
+import org.apache.griffin.core.job.entity.LivySessionStates;
+import org.apache.griffin.core.job.repo.JobInstanceRepo;
 import org.apache.griffin.core.job.repo.JobRepo;
 import org.apache.griffin.core.measure.entity.Measure;
 import org.apache.griffin.core.measure.repo.MeasureRepo;
@@ -63,6 +66,8 @@
     private JobRepo<AbstractJob> jobRepo;
     @Mock
     private MetricStoreImpl metricStore;
+    @Mock
+    private JobInstanceRepo jobInstanceRepo;
 
     @Autowired
     private Environment env;
@@ -204,4 +209,43 @@
 
     }
 
+    @Test
+    public void testFindMetricSuccess() throws IOException {
+        Long id = 1L;
+        String appId = "application";
+        MetricValue expectedMetric = new MetricValue(
+                "name", 1234L, Collections.singletonMap("applicationId", appId), new HashMap<>());
+
+        given(jobInstanceRepo.findByInstanceId(id))
+                .willReturn(new JobInstanceBean(LivySessionStates.State.RUNNING, 12L, 32L, appId));
+        given(metricStore.getMetric(appId))
+                .willReturn(expectedMetric);
+        MetricValue actualMetric = service.findMetric(id);
+
+        assertEquals(expectedMetric, actualMetric);
+    }
+
+    @Test(expected = GriffinException.NotFoundException.class)
+    public void testFailedToFindJobInstance() throws IOException {
+        Long id = 1L;
+        given(jobInstanceRepo.findByInstanceId(id))
+                .willReturn(null);
+        service.findMetric(id);
+
+    }
+
+    @Test(expected = GriffinException.ServiceException.class)
+    public void testFindMetricFailure() throws IOException {
+        Long id = 1L;
+        String appId = "application";
+
+        given(jobInstanceRepo.findByInstanceId(id))
+                .willReturn(new JobInstanceBean(LivySessionStates.State.RUNNING, 12L, 32L, appId));
+        given(metricStore.getMetric(appId))
+                .willThrow(new GriffinException.ServiceException("", new RuntimeException()));
+        service.findMetric(id);
+
+    }
+
+
 }
diff --git a/service/src/test/java/org/apache/griffin/core/metric/MetricStoreImplTest.java b/service/src/test/java/org/apache/griffin/core/metric/MetricStoreImplTest.java
index d3cbfb0..b91f6ba 100644
--- a/service/src/test/java/org/apache/griffin/core/metric/MetricStoreImplTest.java
+++ b/service/src/test/java/org/apache/griffin/core/metric/MetricStoreImplTest.java
@@ -19,15 +19,55 @@
 
 package org.apache.griffin.core.metric;
 
-import static org.junit.Assert.assertTrue;
+import org.apache.griffin.core.metric.model.MetricValue;
+import org.apache.http.HttpEntity;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.client.RestClient;
+import org.elasticsearch.client.RestClientBuilder;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
+import java.io.IOException;
+import java.io.InputStream;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.net.URISyntaxException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
 
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.BDDMockito.*;
 
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({RestClient.class, RestClientBuilder.class})
+@PowerMockIgnore("javax.management.*")
 public class MetricStoreImplTest {
 
+    private static final String INDEX = "griffin";
+    private static final String TYPE = "accuracy";
+
+    private static final String urlBase = String.format("/%s/%s", INDEX, TYPE);
+    private static final String urlGet = urlBase.concat("/_search?filter_path=hits.hits._source");
+
+    private RestClient restClientMock;
+
+    @Before
+    public void setup(){
+        PowerMockito.mockStatic(RestClient.class);
+        restClientMock = PowerMockito.mock(RestClient.class);
+        RestClientBuilder restClientBuilderMock = PowerMockito.mock(RestClientBuilder.class);
+
+        given(RestClient.builder(anyVararg())).willReturn(restClientBuilderMock);
+        given(restClientBuilderMock.build()).willReturn(restClientMock);
+    }
+
     @Test
     public void testBuildBasicAuthString()
             throws NoSuchMethodException, InvocationTargetException,
@@ -40,4 +80,39 @@
         assertTrue(authStr.equals("Basic dXNlcjpwYXNzd29yZA=="));
     }
 
+    @Test
+    public void testMetricGetting() throws IOException, URISyntaxException {
+        //given
+        Response responseMock = PowerMockito.mock(Response.class);
+        HttpEntity httpEntityMock = PowerMockito.mock(HttpEntity.class);
+        InputStream is = Thread.currentThread().getContextClassLoader()
+                .getResourceAsStream("metricvalue.json");
+        Map<String, String> map = new HashMap<>();
+        map.put("q", "metadata.applicationId:application_1549876136110_0018");
+
+        Map<String, Object> value = new HashMap<String, Object>(){{
+            put("total", 74);
+            put("miss", 0);
+            put("matched", 74);
+            put("matchedFraction", 1);
+        }};
+        MetricValue expectedMetric = new MetricValue("de_demo_results_comparision",
+                1549985089648L,
+                Collections.singletonMap("applicationId", "application_1549876136110_0018"),
+                value);
+
+
+        given(restClientMock.performRequest(eq("GET"), eq(urlGet), eq(map), anyVararg())).willReturn(responseMock);
+        given(responseMock.getEntity()).willReturn(httpEntityMock);
+        given(httpEntityMock.getContent()).willReturn(is);
+
+        //when
+        MetricStoreImpl metricStore = new MetricStoreImpl("", 0, "", "", "");
+        MetricValue metric = metricStore.getMetric("application_1549876136110_0018");
+
+        //then
+        PowerMockito.verifyStatic();
+        assertEquals(expectedMetric, metric);
+    }
+
 }
diff --git a/service/src/test/java/org/apache/griffin/core/util/EntityMocksHelper.java b/service/src/test/java/org/apache/griffin/core/util/EntityMocksHelper.java
index e34b39f..563210d 100644
--- a/service/src/test/java/org/apache/griffin/core/util/EntityMocksHelper.java
+++ b/service/src/test/java/org/apache/griffin/core/util/EntityMocksHelper.java
@@ -217,6 +217,7 @@
         jobDataMap.put(MEASURE_KEY, measureJson);
         jobDataMap.put(PREDICATES_KEY, predicatesJson);
         jobDataMap.put(JOB_NAME, "jobName");
+        jobDataMap.put("jobName", "jobName");
         jobDataMap.put(PREDICATE_JOB_NAME, "predicateJobName");
         jobDataMap.put(GRIFFIN_JOB_ID, 1L);
         jobDetail.setJobDataMap(jobDataMap);
@@ -224,11 +225,24 @@
     }
 
     public static SegmentPredicate createFileExistPredicate()
-        throws JsonProcessingException {
+            throws IOException {
         Map<String, String> config = new HashMap<>();
         config.put("root.path", "hdfs:///griffin/demo_src");
         config.put("path", "/dt=#YYYYMMdd#/hour=#HH#/_DONE");
-        return new SegmentPredicate("file.exist", config);
+        SegmentPredicate segmentPredicate = new SegmentPredicate("file.exist", config);
+        segmentPredicate.setId(1L);
+        segmentPredicate.load();
+        return segmentPredicate;
+    }
+
+    public static SegmentPredicate createMockPredicate()
+            throws IOException {
+        Map<String, String> config = new HashMap<>();
+        config.put("class", "org.apache.griffin.core.util.PredicatorMock");
+        SegmentPredicate segmentPredicate = new SegmentPredicate("custom", config);
+        segmentPredicate.setId(1L);
+        segmentPredicate.load();
+        return segmentPredicate;
     }
 
     public static Map<String, Object> createJobDetailMap() {
diff --git a/service/src/test/java/org/apache/griffin/core/util/PredicatorMock.java b/service/src/test/java/org/apache/griffin/core/util/PredicatorMock.java
new file mode 100644
index 0000000..bc7a6e2
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/util/PredicatorMock.java
@@ -0,0 +1,35 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.util;
+
+import org.apache.griffin.core.job.Predicator;
+import org.apache.griffin.core.job.entity.SegmentPredicate;
+
+import java.io.IOException;
+
+public class PredicatorMock implements Predicator {
+    public PredicatorMock(SegmentPredicate segmentPredicate) {
+    }
+
+    @Override
+    public boolean predicate() throws IOException {
+        return true;
+    }
+}
diff --git a/service/src/test/resources/application.properties b/service/src/test/resources/application.properties
index 86d5316..7864be9 100644
--- a/service/src/test/resources/application.properties
+++ b/service/src/test/resources/application.properties
@@ -59,3 +59,5 @@
 # elasticsearch.password = password
 # griffin event listener
 internal.event.listeners=GriffinJobEventHook,GriffinTestJobEventHook
+
+logging.file=logs/griffin-service.log
\ No newline at end of file
diff --git a/service/src/test/resources/metricvalue.json b/service/src/test/resources/metricvalue.json
new file mode 100644
index 0000000..690c1c4
--- /dev/null
+++ b/service/src/test/resources/metricvalue.json
@@ -0,0 +1,51 @@
+{
+  "took": 47,
+  "timed_out": false,
+  "_shards": {
+    "total": 5,
+    "successful": 5,
+    "skipped": 0,
+    "failed": 0
+  },
+  "hits": {
+    "total": 17,
+    "max_score": 3.226844,
+    "hits": [
+      {
+        "_index": "griffin",
+        "_type": "accuracy",
+        "_id": "RZFP4mgBkZqzqlKSwWtJ",
+        "_score": 3.226844,
+        "_source": {
+          "name": "de_demo_results_comparision",
+          "tmst": 1549985089648,
+          "value": {
+            "total": 74,
+            "miss": 0,
+            "matched": 74,
+            "matchedFraction": 1
+          },
+          "metadata": {
+            "applicationId": "application_1549876136110_0018"
+          }
+        }
+      },
+      {
+        "_index": "griffin",
+        "_type": "accuracy",
+        "_id": "taMpvmgBfOpRJiYFj5Xg",
+        "_score": 2.4107988,
+        "_source": {
+          "name": "de_demo_results_comparision",
+          "tmst": 1549378607658,
+          "value": {
+            "total": 74,
+            "miss": 0,
+            "matched": 74,
+            "matchedFraction": 1
+          }
+        }
+      }
+    ]
+  }
+}
\ No newline at end of file
diff --git a/ui/angular/src/app/job/job.component.html b/ui/angular/src/app/job/job.component.html
index 2fb87a2..2b7cd9d 100644
--- a/ui/angular/src/app/job/job.component.html
+++ b/ui/angular/src/app/job/job.component.html
@@ -77,7 +77,7 @@
           &nbsp;
           <a (click)="remove(row)" title="delete" style="text-decoration:none">
             <i class="fa fa-trash-o po"></i>
-          </a> &nbsp;
+          </a>&nbsp;
           <a routerLink="/job/{{row.id}}" title="subscribe">
             <i class="fa fa-eye"></i>
           </a>&nbsp;
@@ -86,6 +86,9 @@
           </a>
           <a *ngIf="row.action!=='START'" (click)="stateMag(row)" title="Stop" style="text-decoration:none">
             <i class="fa fa-stop"></i>
+          </a>&nbsp;
+          <a (click)="trigger(row)" title="trigger now" style="text-decoration:none">
+            <i class="fa fa-caret-square-o-right po"></i>
           </a>
         </td>
         <td>
diff --git a/ui/angular/src/app/job/job.component.ts b/ui/angular/src/app/job/job.component.ts
index 0a86fe5..893db64 100644
--- a/ui/angular/src/app/job/job.component.ts
+++ b/ui/angular/src/app/job/job.component.ts
@@ -45,6 +45,7 @@
   action: string;
   modalWndMsg: string;
   isStop: boolean;
+  isTrigger: boolean;
 
   private toasterService: ToasterService;
 
@@ -101,6 +102,19 @@
           console.log("Error when manage job state");
         });
     }
+    else if (this.isTrigger) {
+      $("#save").attr("disabled", "true");
+      let actionUrl = this.serviceService.config.uri.triggerJobById + "/" + this.deleteId;
+      this.http.post(actionUrl, {}).subscribe(data => {
+          let self = this;
+          self.hide();
+          this.isTrigger = false;
+        },
+        err => {
+          this.toasterService.pop("error", "Error!", "Failed to trigger job!");
+          console.log("Error when trigger job");
+        });
+    }
     else {
       let deleteJob = this.serviceService.config.uri.deleteJob;
       let deleteUrl = deleteJob + "/" + this.deleteId;
@@ -196,4 +210,15 @@
       this.results = Object.assign([], trans).reverse();
     });
   }
+
+  trigger(row): void {
+    $("#save").removeAttr("disabled");
+    this.modalWndMsg = "Trigger the job with the below information?";
+    this.visible = true;
+    setTimeout(() => (this.visibleAnimate = true), 100);
+    this.deletedRow = row;
+    this.deleteIndex = this.results.indexOf(row);
+    this.deleteId = row.id;
+    this.isTrigger = true;
+  }
 }
diff --git a/ui/angular/src/app/service/service.service.ts b/ui/angular/src/app/service/service.service.ts
index 7d50b4b..57093f4 100644
--- a/ui/angular/src/app/service/service.service.ts
+++ b/ui/angular/src/app/service/service.service.ts
@@ -92,6 +92,7 @@
       addJobs: this.BACKEND_SERVER + this.API_ROOT_PATH + "/jobs",
       modifyJobs: this.BACKEND_SERVER + this.API_ROOT_PATH + "/jobs",
       getJobById: this.BACKEND_SERVER + this.API_ROOT_PATH + "/jobs/config",
+      triggerJobById: this.BACKEND_SERVER + this.API_ROOT_PATH + "/jobs/trigger",
       getMeasuresByOwner:
       this.BACKEND_SERVER + this.API_ROOT_PATH + "/measures/owner/",
       deleteJob: this.BACKEND_SERVER + this.API_ROOT_PATH + "/jobs",
diff --git a/ui/pom.xml b/ui/pom.xml
index 6262415..d42cb6a 100644
--- a/ui/pom.xml
+++ b/ui/pom.xml
@@ -24,7 +24,7 @@
     <parent>
         <groupId>org.apache.griffin</groupId>
         <artifactId>griffin</artifactId>
-        <version>0.4.0-SNAPSHOT</version>
+        <version>0.6.0-SNAPSHOT</version>
     </parent>
     <artifactId>ui</artifactId>
     <packaging>pom</packaging>
@@ -34,7 +34,6 @@
 
     <properties>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <java.version>1.8</java.version>
         <node.version>v6.11.3</node.version>
         <npm.version>3.10.10</npm.version>
     </properties>
@@ -44,12 +43,9 @@
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
-            <version>3.8.1</version>
-            <scope>test</scope>
         </dependency>
     </dependencies>
 
-
     <build>
         <plugins>
             <plugin>
@@ -137,11 +133,6 @@
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-compiler-plugin</artifactId>
-                <version>3.6.1</version>
-                <configuration>
-                    <source>1.8</source>
-                    <target>1.8</target>
-                </configuration>
             </plugin>
         </plugins>
     </build>