Merge remote-tracking branch 'origin/trunk' into ambari-rest-api-explorer
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/ClustersManageAccessCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/ClustersManageAccessCtrl.js
index e1b74aa..3a9ad67 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/ClustersManageAccessCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/ClustersManageAccessCtrl.js
@@ -37,7 +37,7 @@
       });
       var orderedRoles = Cluster.orderedRoles;
       var pms = [];
-      for (var key in orderedRoles) {
+      for (var key=0;key<orderedRoles.length;key++) {
         pms.push($scope.permissions[orderedRoles[key]]);
       }
       $scope.permissions = pms;
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index b3c27dc..69c35c0 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@ -29,6 +29,7 @@
   $scope.stackIds = [];
   $scope.allVersions = [];
   $scope.networkLost = false;
+  $scope.stackRepoUpdateLinkExists = true;
   $scope.skipValidation = false;
   $scope.useRedhatSatellite = false;
 
@@ -543,6 +544,23 @@
     })[0];
   };
 
+  /**
+   * Set $scope.stackRepoUpdateLinkExists to true if at least one stack has the repo URL link in its repoinfo.xml.
+   * @param versions {Array} list of stack version objects to inspect
+   * */
+  $scope.setStackRepoUpdateLinkExists = function (versions) {
+    var stackRepoUpdateLinkExists = versions.find(function(_version){
+      return _version.stackRepoUpdateLinkExists;
+    });
+
+    // Found at least one version with the stack repo update link
+    if (stackRepoUpdateLinkExists){
+      $scope.stackRepoUpdateLinkExists = true;
+    } else {
+      $scope.stackRepoUpdateLinkExists = false;
+    }
+  };
+
   $scope.setNetworkIssues = function (versions) {
    $scope.networkLost = !versions.find(function(_version){
      return !_version.stackDefault;
@@ -576,6 +594,7 @@
         $scope.selectedPublicRepoVersion = $scope.activeStackVersion;
         $scope.setVersionSelected($scope.activeStackVersion);
         $scope.setNetworkIssues(versions);
+        $scope.setStackRepoUpdateLinkExists(versions);
         $scope.validateRepoUrl();
         $scope.availableStackRepoList = versions.length == 1 ? [] : versions;
       }
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
index c6ba241..a84a97c 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
@@ -24,15 +24,15 @@
     os: $t('versions.os')
   };
   $scope.editController = true;
-  $scope.osList = [];
+  $scope.osList = []; // view model for displaying the repo URLs of the various OSes
   $scope.skipValidation = false;
   $scope.useRedhatSatellite = false;
-  $scope.selectedOS = 0;
   $scope.upgradeStack = {
     stack_name: '',
     stack_version: '',
     display_name: ''
   };
+  $scope.defaulfOSRepos = {}; // a copy of the initially loaded repo info, used for the "changed" check later
 
   $scope.loadStackVersionInfo = function () {
     return Stack.getRepo($routeParams.versionId, $routeParams.stackName).then(function (response) {
@@ -54,8 +54,6 @@
             var skipServices = ['MAPREDUCE2', 'GANGLIA', 'KERBEROS'];
             return skipServices.indexOf(service.name) === -1;
           }) || [];
-      //save default values of repos to check if they were changed
-      $scope.defaulfOSRepos = {};
       response.updateObj.operating_systems.forEach(function(os) {
         $scope.defaulfOSRepos[os.OperatingSystems.os_type] = {};
         os.repositories.forEach(function(repo) {
@@ -84,8 +82,6 @@
       } else {
         $scope.deleteEnabled = $scope.isDeletable();
       }
-      // fetch all repos to display the left menu
-      $scope.fetchRepos();
     });
   };
 
@@ -133,33 +129,34 @@
     });
   };
 
-  $scope.defaulfOSRepos = {};
-
   $scope.save = function () {
     $scope.editVersionDisabled = true;
     delete $scope.updateObj.href;
     $scope.updateObj.operating_systems = [];
-    var updateRepoUrl = false;
+    // check if there is any change in repo list
+    var changed = false;
     angular.forEach($scope.osList, function (os) {
       var savedUrls = $scope.defaulfOSRepos[os.OperatingSystems.os_type];
-      os.OperatingSystems.ambari_managed_repositories = !$scope.useRedhatSatellite;
-      if (os.selected) {
-        var currentRepos = os.repositories;
-        var urlChanged = false;
-        angular.forEach(currentRepos, function (repo) {
-          if (repo.Repositories.base_url != savedUrls[repo.Repositories.repo_id]) {
-            urlChanged = true;
-          }
-        });
-        if (!savedUrls || urlChanged) {
-          updateRepoUrl = true;
+      if (os.selected) { // currently shown?
+        if (savedUrls) { // initially loaded?
+          angular.forEach(os.repositories, function (repo) {
+            if (repo.Repositories.base_url != savedUrls[repo.Repositories.repo_id]) {
+              changed = true; // modified
+            }
+          });
+        } else {
+          changed = true; // added
         }
+        os.OperatingSystems.ambari_managed_repositories = !$scope.useRedhatSatellite;
         $scope.updateObj.operating_systems.push(os);
-      } else if (savedUrls) {
-        updateRepoUrl = true;
+      } else {
+        if (savedUrls) {
+          changed = true; // removed
+        }
       }
     });
-    if (updateRepoUrl && !$scope.deleteEnabled) {
+    // show confirmation when making changes to current/installed repo
+    if (changed && !$scope.deleteEnabled) {
       ConfirmationModal.show(
           $t('versions.changeBaseURLConfirmation.title'),
           $t('versions.changeBaseURLConfirmation.message'),
@@ -342,64 +339,5 @@
     return hasErrors;
   };
 
-
-  // add all repos list
-  $scope.filter = {
-    version: '',
-    cluster: {
-      options: [],
-      current: null
-    }
-  };
-
-  $scope.pagination = {
-    totalRepos: 100,
-    maxVisiblePages: 1,
-    itemsPerPage: 100,
-    currentPage: 1
-  };
-  $scope.allRepos = [];
-  $scope.stackVersions = [];
-
-
-
-  /**
-   *  Formatted object to display all repos:
-   *
-   *  [{ 'name': 'HDP-2.3',
-   *     'repos': ['2.3.6.0-2343', '2.3.4.1', '2.3.4.0-56']
-   *   },
-   *   { 'name': 'HDP-2.2',
-   *     'repos': ['2.2.6.0', '2.2.4.5', '2.2.4.0']
-   *   }
-   *  ]
-   *
-   */
-  $scope.fetchRepos = function () {
-    return Stack.allRepos($scope.filter, $scope.pagination).then(function (repos) {
-      $scope.allRepos = repos.items.sort(function(a, b){return a.repository_version < b.repository_version});
-      var existingStackHash = {};
-      var stackVersions = [];
-      angular.forEach($scope.allRepos, function (repo) {
-        var stackVersionName = repo.stack_name + '-' + repo.stack_version;
-        var currentStackVersion = $scope.upgradeStack.stack_name + '-' + $scope.upgradeStack.stack_version;
-        repo.isActive = $scope.actualVersion == repo.repository_version;
-        if (!existingStackHash[stackVersionName]) {
-          existingStackHash[stackVersionName] = true;
-          stackVersions.push({
-            'name': stackVersionName,
-            'isOpened': stackVersionName == currentStackVersion,
-            'repos': [repo]
-          });
-        } else {
-          if (stackVersions[stackVersions.length -1].repos) {
-            stackVersions[stackVersions.length -1].repos.push(repo);
-          }
-        }
-      });
-      $scope.stackVersions = stackVersions;
-    });
-  };
-
   $scope.loadStackVersionInfo();
 }]);
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index fba8538..fd2c6e5 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -307,7 +307,7 @@
     'users.showAll': 'Show all users',
     'users.showAdmin': 'Show only admin users',
     'users.groupMembership': 'Group Membership',
-    'users.userNameTip': 'Only alpha-numeric characters, up to 80 characters',
+    'users.userNameTip': 'Maximum length is 80 characters. \\, &, |, <, >, ` are not allowed.',
 
     'users.changeStatusConfirmation.title': 'Change Status',
     'users.changeStatusConfirmation.message': 'Are you sure you want to change status for user "{{userName}}" to {{status}}?',
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
index e028906..b496987 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
@@ -88,7 +88,7 @@
     },
 
     allPublicStackVersions: function() {
-      var url = '/version_definitions?fields=VersionDefinition/stack_default,operating_systems/repositories/Repositories/*,VersionDefinition/stack_services,VersionDefinition/repository_version' +
+      var url = '/version_definitions?fields=VersionDefinition/stack_default,VersionDefinition/stack_repo_update_link_exists,operating_systems/repositories/Repositories/*,VersionDefinition/stack_services,VersionDefinition/repository_version' +
         '&VersionDefinition/show_available=true';
       var deferred = $q.defer();
       $http.get(Settings.baseUrl + url, {mock: 'version/versions.json'})
@@ -100,6 +100,7 @@
               stackName: version.VersionDefinition.stack_name,
               stackVersion: version.VersionDefinition.stack_version,
               stackDefault: version.VersionDefinition.stack_default,
+              stackRepoUpdateLinkExists: version.VersionDefinition.stack_repo_update_link_exists,
               stackNameVersion:  version.VersionDefinition.stack_name + '-' + version.VersionDefinition.stack_version,
               displayName: version.VersionDefinition.stack_name + '-' + version.VersionDefinition.repository_version.split('-')[0], //HDP-2.3.4.0
               displayNameFull: version.VersionDefinition.stack_name + '-' + version.VersionDefinition.repository_version, //HDP-2.3.4.0-23
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/main.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/main.html
index d62ae15..3bdb80e 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/main.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/main.html
@@ -18,51 +18,79 @@
 <div class="panel panel-default mainpage">
   <div class="panel-body">
     <h1>{{'main.title' | translate}}</h1>
+
     <div ng-if="isLoaded" id="main-operations-boxes" class="row thumbnails">
       <p ng-hide="cluster">{{'main.noClusterDescription' | translate}}</p>
+
       <p ng-show="cluster">{{'main.hasClusterDescription' | translate}}</p>
 
-        <!--Clusters-->
+      <!--Clusters-->
       <div ng-show="cluster" class="col-sm-11 thumbnail">
         <h4 class="title">{{'main.operateCluster.title' | translate}}</h4>
+
         <div class="description">{{'main.operateCluster.description' | translate}}</div>
         <div class="glyphicon glyphicon-cloud"></div>
         <div class="buttons">
         <span ng-class="{active: isActive('clusters.manageAccess')}">
-          <a ng-show="cluster.Clusters.provisioning_state != 'INSTALLED'" href class="btn btn-primary permission-button" ng-disabled="cluster.Clusters.provisioning_state != 'INSTALLED' ">{{'main.operateCluster.manageRoles' | translate}}</a>
-          <a ng-show="cluster.Clusters.provisioning_state == 'INSTALLED'" href="#/clusters/{{cluster.Clusters.cluster_name}}/userAccessList" class="btn btn-primary permission-button" ng-disabled="cluster.Clusters.provisioning_state != 'INSTALLED' ">{{'main.operateCluster.manageRoles' | translate}}</a>
+          <a ng-show="cluster.Clusters.provisioning_state != 'INSTALLED'" href class="btn btn-primary permission-button"
+             ng-disabled="cluster.Clusters.provisioning_state != 'INSTALLED' ">{{'main.operateCluster.manageRoles' |
+            translate}}</a>
+          <a ng-show="cluster.Clusters.provisioning_state == 'INSTALLED'"
+             href="#/clusters/{{cluster.Clusters.cluster_name}}/userAccessList"
+             class="btn btn-primary permission-button"
+             ng-disabled="cluster.Clusters.provisioning_state != 'INSTALLED' ">{{'main.operateCluster.manageRoles' |
+            translate}}</a>
         </span>
         <span>
-          <a ng-show="cluster.Clusters.provisioning_state != 'INSTALLED'" href class="btn btn-primary go-dashboard-button" ng-disabled="cluster.Clusters.provisioning_state != 'INSTALLED' ">{{'common.goToDashboard' | translate}}</a>
-          <a ng-show="cluster.Clusters.provisioning_state == 'INSTALLED'" href="{{fromSiteRoot('/#/main/dashboard/metrics')}}" class="btn btn-primary go-dashboard-button" ng-disabled="cluster.Clusters.provisioning_state != 'INSTALLED' ">{{'common.goToDashboard' | translate}}</a>
+          <a ng-show="cluster.Clusters.provisioning_state != 'INSTALLED'" href
+             class="btn btn-primary go-dashboard-button"
+             ng-disabled="cluster.Clusters.provisioning_state != 'INSTALLED' ">{{'common.goToDashboard' |
+            translate}}</a>
+          <a ng-show="cluster.Clusters.provisioning_state == 'INSTALLED'"
+             href="{{fromSiteRoot('/#/main/dashboard/metrics')}}" class="btn btn-primary go-dashboard-button"
+             ng-disabled="cluster.Clusters.provisioning_state != 'INSTALLED' ">{{'common.goToDashboard' |
+            translate}}</a>
         </span>
         </div>
       </div>
       <div ng-hide="cluster" class="col-sm-11 thumbnail">
         <h4 class="title">{{'main.createCluster.title' | translate}}</h4>
+
         <div class="description">{{'main.createCluster.description' | translate}}</div>
         <div class="glyphicon glyphicon-cloud"></div>
-        <div class="buttons"> <a href="{{fromSiteRoot('/#/installer/step0')}}" class="btn btn-primary create-cluster-button">{{'main.createCluster.launchInstallWizard' | translate}}</a></div>
+        <div class="buttons"><a href="{{fromSiteRoot('/#/installer/step0')}}"
+                                class="btn btn-primary create-cluster-button">{{'main.createCluster.launchInstallWizard'
+          | translate}}</a></div>
       </div>
 
-        <!--Manage Users and groups-->
+      <!--Manage Users and groups-->
       <div class="col-sm-5 thumbnail">
         <h4 class="title">{{'main.manageUsersAndGroups.title' | translate}}</h4>
+
         <div class="description">{{'main.manageUsersAndGroups.description' | translate}}</div>
         <div class="glyphicon glyphicon-user"></div>
         <div class="buttons">
-          <span ng-class="{active: isActive('users.list')}"><link-to route="users.list" class="btn btn-primary userslist-button">{{'common.users' | translate}}</link-to></span>
-          <span ng-class="{active: isActive('groups.list')}"><link-to route="groups.list" class="btn btn-primary groupslist-button">{{'common.groups' | translate}}</link-to></span>
+          <span ng-class="{active: isActive('users.list')}"><link-to route="users.list"
+                                                                     class="btn btn-primary userslist-button">
+            {{'common.users' | translate}}
+          </link-to></span>
+          <span ng-class="{active: isActive('groups.list')}"><link-to route="groups.list"
+                                                                      class="btn btn-primary groupslist-button">
+            {{'common.groups' | translate}}
+          </link-to></span>
         </div>
       </div>
 
-        <!--Deploy Views-->
+      <!--Deploy Views-->
       <div class="col-sm-5 thumbnail">
         <h4 class="title">{{'main.deployViews.title' | translate}}</h4>
+
         <div class="description">{{'main.deployViews.description' | translate}}</div>
         <div class="glyphicon glyphicon-th"></div>
-        <div ng-class="{active: isActive('views.list')}" class="buttons"><link-to route="views.list" class="btn btn-primary viewslist-button">{{'common.views' | translate}}</link-to></div>
+        <div ng-class="{active: isActive('views.list')}" class="buttons">
+          <link-to route="views.list" class="btn btn-primary viewslist-button">{{'common.views' | translate}}</link-to>
         </div>
       </div>
+    </div>
   </div>
 </div>
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
index 3bee2a1..fe08802 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
@@ -115,8 +115,7 @@
   </div>
 
   <div id="upload-definition-file-panel" ng-if="createController">
-
-    <div class="col-sm-12 big-radio clearfix" ng-class="{'disabled' : networkLost || useRedhatSatellite}">
+    <div class="col-sm-12 big-radio clearfix hide-soft" ng-class="{'disabled' : networkLost || useRedhatSatellite,'visible':stackRepoUpdateLinkExists}">
       <input type="radio" ng-model="selectedOption.index" value="1" ng-change="togglePublicLocalOptionSelect()" ng-disabled="networkLost || useRedhatSatellite">
       <span>{{'versions.usePublic' | translate}}</span>
       <a id="public-disabled-link" href="javascript:void(0);" ng-if="networkLost" ng-click="showPublicRepoDisabledDialog()">{{'versions.networkIssues.networkLost'| translate}}</a>
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
index cc5d8d4..80a3b04 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
@@ -34,7 +34,7 @@
         placeholder="{{'users.userName' | translate}}"
         ng-model="user.user_name"
         ng-required="true"
-        ng-pattern="/^\w*$/"
+        ng-pattern="/^[^<>&`|\\]+$/"
         ng-maxlength="80"
         tooltip="{{'users.userNameTip' | translate}}"
         autocomplete="off"
diff --git a/ambari-agent/conf/unix/install-helper.sh b/ambari-agent/conf/unix/install-helper.sh
index 0e32d0a..c30aab1 100644
--- a/ambari-agent/conf/unix/install-helper.sh
+++ b/ambari-agent/conf/unix/install-helper.sh
@@ -70,6 +70,7 @@
   chmod a+x $AMBARI_AGENT_VAR
   
   chmod 1777 $AMBARI_AGENT_VAR/tmp
+  chmod 700 $AMBARI_AGENT_VAR/keys
   chmod 700 $AMBARI_AGENT_VAR/data
 
   #TODO we need this when upgrading from pre 2.4 versions to 2.4, remove this when upgrade from pre 2.4 versions will be
@@ -89,7 +90,7 @@
   rm -f "$PYTHON_WRAPER_TARGET"
 
   AMBARI_PYTHON=""
-  python_binaries=( "/usr/bin/python" "/usr/bin/python2" "/usr/bin/python2.7", "/usr/bin/python2.6" )
+  python_binaries=( "/usr/bin/python" "/usr/bin/python2" "/usr/bin/python2.7" "/usr/bin/python2.6" )
   for python_binary in "${python_binaries[@]}"
   do
     $python_binary -c "import sys ; ver = sys.version_info ; sys.exit(not (ver >= (2,6) and ver<(3,0)))" 1>/dev/null 2>/dev/null
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index a57ed64..c1cb056 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -330,6 +330,7 @@
                     <include>/tools/*.jar</include>
                     <include>/cache/stacks/HDP/2.1.GlusterFS/services/STORM/package/files/wordCount.jar</include>
                     <include>/cache/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar</include>
+                    <include>/cache/stacks/HDP/3.0/hooks/before-START/files/fast-hdfs-resource.jar</include>
                     <include>/cache/common-services/STORM/0.9.1/package/files/wordCount.jar</include>
                   </includes>
                 </source>
diff --git a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
index 75880c6..1eda5c2 100644
--- a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
+++ b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
@@ -498,16 +498,14 @@
 
   def execute_status_command_and_security_status(self, command):
     component_status_result = self.customServiceOrchestrator.requestComponentStatus(command)
-    component_security_status_result = self.customServiceOrchestrator.requestComponentSecurityState(command)
-
-    return command, component_status_result, component_security_status_result
+    return command, component_status_result
 
   def process_status_command_result(self, result):
     '''
     Executes commands of type STATUS_COMMAND
     '''
     try:
-      command, component_status_result, component_security_status_result = result
+      command, component_status_result = result
       cluster = command['clusterName']
       service = command['serviceName']
       component = command['componentName']
@@ -548,9 +546,6 @@
       if self.controller.recovery_manager.enabled():
         result['sendExecCmdDet'] = str(request_execution_cmd)
 
-      # Add security state to the result
-      result['securityState'] = component_security_status_result
-
       if component_extra is not None and len(component_extra) != 0:
         if component_extra.has_key('alerts'):
           result['alerts'] = component_extra['alerts']
diff --git a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
index cf48189..95e4712 100644
--- a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
+++ b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
@@ -27,6 +27,7 @@
 
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+
 logger = logging.getLogger(__name__)
 
 content = """
@@ -74,9 +75,8 @@
 
 """.format(ps=os.sep)
 
-
 servicesToPidNames = {
-  'GLUSTERFS' : 'glusterd.pid$',
+  'GLUSTERFS': 'glusterd.pid$',
   'NAMENODE': 'hadoop-{USER}-namenode.pid$',
   'SECONDARY_NAMENODE': 'hadoop-{USER}-secondarynamenode.pid$',
   'DATANODE': 'hadoop-{USER}-datanode.pid$',
@@ -97,13 +97,13 @@
   'KERBEROS_SERVER': 'kadmind.pid',
   'HIVE_SERVER': 'hive-server.pid',
   'HIVE_METASTORE': 'hive.pid',
-  'HIVE_SERVER_INTERACTIVE' : 'hive-interactive.pid',
+  'HIVE_SERVER_INTERACTIVE': 'hive-interactive.pid',
   'MYSQL_SERVER': 'mysqld.pid',
   'HUE_SERVER': '/var/run/hue/supervisor.pid',
   'WEBHCAT_SERVER': 'webhcat.pid',
 }
 
-#Each service, which's pid depends on user should provide user mapping
+# Each service whose pid depends on a user should provide a user mapping
 servicesToLinuxUser = {
   'NAMENODE': 'hdfs_user',
   'SECONDARY_NAMENODE': 'hdfs_user',
@@ -120,30 +120,30 @@
 }
 
 pidPathVars = [
-  {'var' : 'glusterfs_pid_dir_prefix',
-   'defaultValue' : '/var/run'},
-  {'var' : 'hadoop_pid_dir_prefix',
-   'defaultValue' : '/var/run/hadoop'},
-  {'var' : 'hadoop_pid_dir_prefix',
-   'defaultValue' : '/var/run/hadoop'},
-  {'var' : 'hbase_pid_dir',
-   'defaultValue' : '/var/run/hbase'},
-  {'var' : 'zk_pid_dir',
-   'defaultValue' : '/var/run/zookeeper'},
-  {'var' : 'oozie_pid_dir',
-   'defaultValue' : '/var/run/oozie'},
-  {'var' : 'hcat_pid_dir',
-   'defaultValue' : '/var/run/webhcat'},
-  {'var' : 'hive_pid_dir',
-   'defaultValue' : '/var/run/hive'},
-  {'var' : 'mysqld_pid_dir',
-   'defaultValue' : '/var/run/mysqld'},
-  {'var' : 'hcat_pid_dir',
-   'defaultValue' : '/var/run/webhcat'},
-  {'var' : 'yarn_pid_dir_prefix',
-   'defaultValue' : '/var/run/hadoop-yarn'},
-  {'var' : 'mapred_pid_dir_prefix',
-   'defaultValue' : '/var/run/hadoop-mapreduce'},
+  {'var': 'glusterfs_pid_dir_prefix',
+   'defaultValue': '/var/run'},
+  {'var': 'hadoop_pid_dir_prefix',
+   'defaultValue': '/var/run/hadoop'},
+  {'var': 'hadoop_pid_dir_prefix',
+   'defaultValue': '/var/run/hadoop'},
+  {'var': 'hbase_pid_dir',
+   'defaultValue': '/var/run/hbase'},
+  {'var': 'zk_pid_dir',
+   'defaultValue': '/var/run/zookeeper'},
+  {'var': 'oozie_pid_dir',
+   'defaultValue': '/var/run/oozie'},
+  {'var': 'hcat_pid_dir',
+   'defaultValue': '/var/run/webhcat'},
+  {'var': 'hive_pid_dir',
+   'defaultValue': '/var/run/hive'},
+  {'var': 'mysqld_pid_dir',
+   'defaultValue': '/var/run/mysqld'},
+  {'var': 'hcat_pid_dir',
+   'defaultValue': '/var/run/webhcat'},
+  {'var': 'yarn_pid_dir_prefix',
+   'defaultValue': '/var/run/hadoop-yarn'},
+  {'var': 'mapred_pid_dir_prefix',
+   'defaultValue': '/var/run/hadoop-mapreduce'},
 ]
 
 
@@ -323,14 +323,37 @@
     if reg_resp and AmbariConfig.AMBARI_PROPERTIES_CATEGORY in reg_resp:
       if not self.has_section(AmbariConfig.AMBARI_PROPERTIES_CATEGORY):
         self.add_section(AmbariConfig.AMBARI_PROPERTIES_CATEGORY)
-      for k,v in reg_resp[AmbariConfig.AMBARI_PROPERTIES_CATEGORY].items():
+      for k, v in reg_resp[AmbariConfig.AMBARI_PROPERTIES_CATEGORY].items():
         self.set(AmbariConfig.AMBARI_PROPERTIES_CATEGORY, k, v)
         logger.info("Updating config property (%s) with value (%s)", k, v)
     pass
 
-  def get_force_https_protocol(self):
+  def get_force_https_protocol_name(self):
+    """
+    Get forced https protocol name.
+
+    :return: protocol name, PROTOCOL_TLSv1 by default
+    """
     return self.get('security', 'force_https_protocol', default="PROTOCOL_TLSv1")
 
+  def get_force_https_protocol_value(self):
+    """
+    Get the forced https protocol value that corresponds to the ssl module constant.
+
+    :return: protocol value
+    """
+    import ssl
+    return getattr(ssl, self.get_force_https_protocol_name())
+
+  def get_ca_cert_file_path(self):
+    """
+    Get path to file with trusted certificates.
+
+    :return: trusted certificates file path
+    """
+    return self.get('security', 'ca_cert_path', default="")
+
+
 def isSameHostList(hostlist1, hostlist2):
   is_same = True
 
diff --git a/ambari-agent/src/main/python/ambari_agent/Controller.py b/ambari-agent/src/main/python/ambari_agent/Controller.py
index 78b5c0c..0297f74 100644
--- a/ambari-agent/src/main/python/ambari_agent/Controller.py
+++ b/ambari-agent/src/main/python/ambari_agent/Controller.py
@@ -29,6 +29,7 @@
 import urllib2
 import pprint
 from random import randint
+import re
 import subprocess
 import functools
 
@@ -587,7 +588,9 @@
     Stack Upgrade.
     """
     try:
-      if compare_versions(self.version, "2.1.2") >= 0:
+      version = self.get_version()
+      logger.debug("Ambari Agent version {0}".format(version))
+      if compare_versions(version, "2.1.2") >= 0:
         source_file = "/etc/hadoop/conf/dfs_data_dir_mount.hist"
         destination_file = "/var/lib/ambari-agent/data/datanode/dfs_data_dir_mount.hist"
         if os.path.exists(source_file) and not os.path.exists(destination_file):
@@ -601,9 +604,16 @@
           return_code = subprocess.call(command, shell=True)
           logger.info("Return code: %d" % return_code)
     except Exception, e:
-      logger.info("Exception in move_data_dir_mount_file(). Error: {0}".format(str(e)))
+      logger.error("Exception in move_data_dir_mount_file(). Error: {0}".format(str(e)))
 
-
+  def get_version(self):
+    version = self.version
+    matches = re.findall(r"[\d+.]+",version)
+    if not matches:
+      logger.warning("No version match result, use original version {0}".format(version))
+      return version
+    else:
+      return matches[0]
 
 def main(argv=None):
   # Allow Ctrl-C
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index a67e16e..7dd00de 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -47,7 +47,6 @@
   SCRIPT_TYPE_PYTHON = "PYTHON"
   COMMAND_TYPE = "commandType"
   COMMAND_NAME_STATUS = "STATUS"
-  COMMAND_NAME_SECURITY_STATUS = "SECURITY_STATUS"
   CUSTOM_ACTION_COMMAND = 'ACTIONEXECUTE'
   CUSTOM_COMMAND_COMMAND = 'CUSTOM_COMMAND'
 
@@ -63,7 +62,7 @@
   AMBARI_SERVER_PORT = "ambari_server_port"
   AMBARI_SERVER_USE_SSL = "ambari_server_use_ssl"
 
-  FREQUENT_COMMANDS = [COMMAND_NAME_SECURITY_STATUS, COMMAND_NAME_STATUS]
+  FREQUENT_COMMANDS = [COMMAND_NAME_STATUS]
   DONT_DEBUG_FAILURES_FOR_COMMANDS = FREQUENT_COMMANDS
   REFLECTIVELY_RUN_COMMANDS = FREQUENT_COMMANDS # -- commands which run a lot and often (this increases their speed)
   DONT_BACKUP_LOGS_FOR_COMMANDS = FREQUENT_COMMANDS
@@ -82,7 +81,8 @@
   def __init__(self, config, controller):
     self.config = config
     self.tmp_dir = config.get('agent', 'prefix')
-    self.force_https_protocol = config.get_force_https_protocol()
+    self.force_https_protocol = config.get_force_https_protocol_name()
+    self.ca_cert_file_path = config.get_ca_cert_file_path()
     self.exec_tmp_dir = Constants.AGENT_TMP_DIR
     self.file_cache = FileCache(config)
     self.status_commands_stdout = os.path.join(self.tmp_dir,
@@ -396,7 +396,7 @@
       for py_file, current_base_dir in filtered_py_file_list:
         log_info_on_failure = not command_name in self.DONT_DEBUG_FAILURES_FOR_COMMANDS
         script_params = [command_name, json_path, current_base_dir, tmpstrucoutfile, logger_level, self.exec_tmp_dir,
-                         self.force_https_protocol]
+                         self.force_https_protocol, self.ca_cert_file_path]
         
         if log_out_files:
           script_params.append("-o")
@@ -467,36 +467,6 @@
                           override_output_files=override_output_files)
     return res
 
-  def requestComponentSecurityState(self, command):
-    """
-     Determines the current security state of the component
-     A command will be issued to trigger the security_status check and the result of this check will
-     returned to the caller. If the component lifecycle script has no security_status method the
-     check will return non zero exit code and "UNKNOWN" will be returned.
-    """
-    override_output_files=True # by default, we override status command output
-    if logger.level == logging.DEBUG:
-      override_output_files = False
-    security_check_res = self.runCommand(command, self.status_commands_stdout,
-                                         self.status_commands_stderr, self.COMMAND_NAME_SECURITY_STATUS,
-                                         override_output_files=override_output_files)
-    result = 'UNKNOWN'
-
-    if security_check_res is None:
-      logger.warn("The return value of the security_status check was empty, the security status is unknown")
-    elif 'exitcode' not in security_check_res:
-      logger.warn("Missing 'exitcode' value from the security_status check result, the security status is unknown")
-    elif security_check_res['exitcode'] != 0:
-      logger.debug("The 'exitcode' value from the security_status check result indicated the check routine failed to properly execute, the security status is unknown")
-    elif 'structuredOut' not in security_check_res:
-      logger.warn("Missing 'structuredOut' value from the security_status check result, the security status is unknown")
-    elif 'securityState' not in security_check_res['structuredOut']:
-      logger.warn("Missing 'securityState' value from the security_status check structuredOut data set, the security status is unknown")
-    else:
-      result = security_check_res['structuredOut']['securityState']
-
-    return result
-
   def resolve_script_path(self, base_dir, script):
     """
     Encapsulates logic of script location determination.
diff --git a/ambari-agent/src/main/python/ambari_agent/Hardware.py b/ambari-agent/src/main/python/ambari_agent/Hardware.py
index 8cb8a28..696438e 100644
--- a/ambari-agent/src/main/python/ambari_agent/Hardware.py
+++ b/ambari-agent/src/main/python/ambari_agent/Hardware.py
@@ -45,11 +45,13 @@
   LINUX_PATH_SEP = "/"
 
   def __init__(self, config):
+    logger.info("Initializing host system information.")
     self.hardware = {
       'mounts': Hardware.osdisks()
     }
     self.config = config
     self.hardware.update(Facter(self.config).facterInfo())
+    logger.info("Host system information: %s", self.hardware)
 
   @classmethod
   def _parse_df_line(cls, line):
diff --git a/ambari-agent/src/main/python/ambari_agent/HostInfo.py b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
index 4b7bfd7..5f96df5 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostInfo.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
@@ -68,9 +68,22 @@
     return 'unknown'
 
   def checkLiveServices(self, services, result):
+    is_redhat7_or_higher = False
+    is_redhat = False
+
+    if OSCheck.is_redhat_family():
+      is_redhat = True
+      if int(OSCheck.get_os_major_version()) >= 7:
+        is_redhat7_or_higher = True
+
     for service in services:
       svcCheckResult = {}
-      svcCheckResult['name'] = " or ".join(service)
+      if "ntpd" in service and is_redhat7_or_higher:
+        svcCheckResult['name'] = "chronyd"
+      elif "chronyd" in service and is_redhat:
+        svcCheckResult['name'] = "ntpd"
+      else:
+        svcCheckResult['name'] = " or ".join(service)
       svcCheckResult['status'] = "UNKNOWN"
       svcCheckResult['desc'] = ""
       try:
diff --git a/ambari-agent/src/main/python/ambari_agent/NetUtil.py b/ambari-agent/src/main/python/ambari_agent/NetUtil.py
index 9b29633..fe32efe 100644
--- a/ambari-agent/src/main/python/ambari_agent/NetUtil.py
+++ b/ambari-agent/src/main/python/ambari_agent/NetUtil.py
@@ -29,7 +29,10 @@
 
 logger = logging.getLogger(__name__)
 
-ensure_ssl_using_protocol(AmbariConfig.get_resolved_config().get_force_https_protocol())
+ensure_ssl_using_protocol(
+  AmbariConfig.get_resolved_config().get_force_https_protocol_name(),
+  AmbariConfig.get_resolved_config().get_ca_cert_file_path()
+)
 
 class NetUtil:
 
diff --git a/ambari-agent/src/main/python/ambari_agent/StatusCommandsExecutor.py b/ambari-agent/src/main/python/ambari_agent/StatusCommandsExecutor.py
index 142e7ca..f42e134 100644
--- a/ambari-agent/src/main/python/ambari_agent/StatusCommandsExecutor.py
+++ b/ambari-agent/src/main/python/ambari_agent/StatusCommandsExecutor.py
@@ -83,280 +83,5 @@
   def kill(self, reason=None, can_relaunch=True):
     pass
 
-class MultiProcessStatusCommandsExecutor(StatusCommandsExecutor):
-  def __init__(self, config, actionQueue):
-    self.config = config
-    self.actionQueue = actionQueue
-
-    self.can_relaunch = True
-
-    # used to prevent queues from been used during creation of new one to prevent threads messing up with combination of
-    # old and new queues
-    self.usage_lock = threading.RLock()
-    # protects against simultaneous killing/creating from different threads.
-    self.kill_lock = threading.RLock()
-
-    self.status_command_timeout = int(self.config.get('agent', 'status_command_timeout', 5))
-    self.customServiceOrchestrator = self.actionQueue.customServiceOrchestrator
-
-    self.worker_process = None
-    self.mustDieEvent = multiprocessing.Event()
-    self.timedOutEvent = multiprocessing.Event()
-
-    # multiprocessing stuff that need to be cleaned every time
-    self.mp_result_queue = multiprocessing.Queue()
-    self.mp_result_logs = multiprocessing.Queue()
-    self.mp_task_queue = multiprocessing.Queue()
-
-  def _drain_queue(self, target_queue, max_time=5, max_empty_count=15, read_break=.001):
-    """
-    Read everything that available in queue. Using not reliable multiprocessing.Queue methods(qsize, empty), so contains
-    extremely dumb protection against blocking too much at this method: will try to get all possible items for not more
-    than ``max_time`` seconds; will return after ``max_empty_count`` calls of ``target_queue.get(False)`` that raised
-    ``Queue.Empty`` exception. Notice ``read_break`` argument, with default values this method will be able to read
-    ~4500 ``range(1,10000)`` objects for 5 seconds. So don't fill queue too fast.
-
-    :param target_queue: queue to read from
-    :param max_time: maximum time to spend in this method call
-    :param max_empty_count: maximum allowed ``Queue.Empty`` in a row
-    :param read_break: time to wait before next read cycle iteration
-    :return: list of resulting objects
-    """
-    results = []
-    _empty = 0
-    _start = time.time()
-    with self.usage_lock:
-      try:
-        while (not target_queue.empty() or target_queue.qsize() > 0) and time.time() - _start < max_time and _empty < max_empty_count:
-          try:
-            results.append(target_queue.get(False))
-            _empty = 0
-            time.sleep(read_break) # sleep a little to get more accurate empty and qsize results
-          except Queue.Empty:
-            _empty += 1
-          except IOError:
-            pass
-          except UnicodeDecodeError:
-            pass
-      except IOError:
-        pass
-    return results
-
-  def _log_message(self, level, message, exception=None):
-    """
-    Put log message to logging queue. Must be used only for logging from child process(in _worker_process_target).
-
-    :param level:
-    :param message:
-    :param exception:
-    :return:
-    """
-    result_message = "StatusCommandExecutor reporting at {0}: ".format(time.time()) + message
-    self.mp_result_logs.put((level, result_message, exception))
-
-  def _process_logs(self):
-    """
-    Get all available at this moment logs and prints them to logger.
-    """
-    for level, message, exception in self._drain_queue(self.mp_result_logs):
-      if level == logging.ERROR:
-        logger.debug(message, exc_info=exception)
-      if level == logging.WARN:
-        logger.warn(message)
-      if level == logging.INFO:
-        logger.info(message)
-
-  def _worker_process_target(self):
-    """
-    Internal method that running in separate process.
-    """
-    # cleanup monkey-patching results in child process, as it causing problems
-    import subprocess
-    reload(subprocess)
-    import multiprocessing
-    reload(multiprocessing)
-
-    bind_debug_signal_handlers()
-    self._log_message(logging.INFO, "StatusCommandsExecutor process started")
-
-    # region StatusCommandsExecutor process internals
-    internal_in_queue = Queue.Queue()
-    internal_out_queue = Queue.Queue()
-
-    def _internal_worker():
-      """
-      thread that actually executes status commands
-      """
-      while True:
-        _cmd = internal_in_queue.get()
-        internal_out_queue.put(self.actionQueue.execute_status_command_and_security_status(_cmd))
-
-    worker = threading.Thread(target=_internal_worker)
-    worker.daemon = True
-    worker.start()
-
-    def _internal_process_command(_command):
-      internal_in_queue.put(_command)
-      start_time = time.time()
-      result = None
-      while not self.mustDieEvent.is_set() and not result and time.time() - start_time < self.status_command_timeout:
-        try:
-          result = internal_out_queue.get(timeout=1)
-        except Queue.Empty:
-          pass
-
-      if result:
-        self.mp_result_queue.put(result)
-        return True
-      else:
-        # do not set timed out event twice
-        if not self.timedOutEvent.is_set():
-          self._set_timed_out(_command)
-        return False
-
-    # endregion
-
-    try:
-      while not self.mustDieEvent.is_set():
-        try:
-          command = self.mp_task_queue.get(False)
-        except Queue.Empty:
-          # no command, lets try in other loop iteration
-          time.sleep(.1)
-          continue
-
-        self._log_message(logging.DEBUG, "Running status command for {0}".format(command['componentName']))
-
-        if _internal_process_command(command):
-          self._log_message(logging.DEBUG, "Completed status command for {0}".format(command['componentName']))
-
-    except Exception as e:
-      self._log_message(logging.ERROR, "StatusCommandsExecutor process failed with exception:", e)
-      raise
-
-    self._log_message(logging.INFO, "StatusCommandsExecutor subprocess finished")
-
-  def _set_timed_out(self, command):
-    """
-    Set timeout event and adding log entry for given command.
-
-    :param command:
-    :return:
-    """
-    msg = "Command {0} for {1} is running for more than {2} seconds. Terminating it due to timeout.".format(
-        command['commandType'],
-        command['componentName'],
-        self.status_command_timeout
-    )
-    self._log_message(logging.WARN, msg)
-    self.timedOutEvent.set()
-
-  def put_commands(self, commands):
-    """
-    Put given commands to command executor.
-
-    :param commands: status commands to execute
-    :return:
-    """
-    with self.usage_lock:
-      for command in commands:
-        logger.info("Adding " + command['commandType'] + " for component " + \
-                    command['componentName'] + " of service " + \
-                    command['serviceName'] + " of cluster " + \
-                    command['clusterName'] + " to the queue.")
-        self.mp_task_queue.put(command)
-        logger.debug(pprint.pformat(command))
-
-  def process_results(self):
-    """
-    Process all the results from the SCE worker process.
-    """
-    self._process_logs()
-    results = self._drain_queue(self.mp_result_queue)
-    logger.debug("Drained %s status commands results, ~%s remains in queue", len(results), self.mp_result_queue.qsize())
-    for result in results:
-      try:
-        self.actionQueue.process_status_command_result(result)
-      except UnicodeDecodeError:
-        pass
-
-  @property
-  def need_relaunch(self):
-    """
-    Indicates if process need to be relaunched due to timeout or it is dead or even was not created.
-
-    :return: tuple (bool, str|None) with flag to relaunch and reason of relaunch
-    """
-    if not self.worker_process or not self.worker_process.is_alive():
-      return True, "WORKER_DEAD"
-    elif self.timedOutEvent.is_set():
-      return True, "COMMAND_TIMEOUT"
-    return False, None
-
-  def relaunch(self, reason=None):
-    """
-    Restart status command executor internal process.
-
-    :param reason: reason of restart
-    :return:
-    """
-    with self.kill_lock:
-      logger.info("Relaunching child process reason:" + str(reason))
-      if self.can_relaunch:
-        self.kill(reason)
-        self.worker_process = multiprocessing.Process(target=self._worker_process_target)
-        self.worker_process.start()
-        logger.info("Started process with pid {0}".format(self.worker_process.pid))
-      else:
-        logger.debug("Relaunch does not allowed, can not relaunch")
-
-  def kill(self, reason=None, can_relaunch=True):
-    """
-    Tries to stop command executor internal process for sort time, otherwise killing it. Closing all possible queues to
-    unblock threads that probably blocked on read or write operations to queues. Must be called from threads different
-    from threads that calling read or write methods(get_log_messages, get_results, put_commands).
-
-    :param can_relaunch: indicates if StatusCommandsExecutor can be relaunched after this kill
-    :param reason: reason of killing
-    :return:
-    """
-    with self.kill_lock:
-      self.can_relaunch = can_relaunch
-
-      if not self.can_relaunch:
-        logger.info("Killing without possibility to relaunch...")
-
-      # try graceful stop, otherwise hard-kill
-      if self.worker_process and self.worker_process.is_alive():
-        self.mustDieEvent.set()
-        self.worker_process.join(timeout=3)
-        if self.worker_process.is_alive():
-          os.kill(self.worker_process.pid, signal.SIGKILL)
-          logger.info("Child process killed by -9")
-        else:
-          # get log messages only if we died gracefully, otherwise we will have chance to block here forever, in most cases
-          # this call will do nothing, as all logs will be processed in ActionQueue loop
-          self._process_logs()
-          logger.info("Child process died gracefully")
-      else:
-        logger.info("Child process already dead")
-
-      # close queues and acquire usage lock
-      # closing both sides of pipes here, we need this hack in case of blocking on recv() call
-      self.mp_result_queue.close()
-      self.mp_result_queue._writer.close()
-      self.mp_result_logs.close()
-      self.mp_result_logs._writer.close()
-      self.mp_task_queue.close()
-      self.mp_task_queue._writer.close()
-
-      with self.usage_lock:
-        self.mp_result_queue.join_thread()
-        self.mp_result_queue = multiprocessing.Queue()
-        self.mp_task_queue.join_thread()
-        self.mp_task_queue = multiprocessing.Queue()
-        self.mp_result_logs.join_thread()
-        self.mp_result_logs = multiprocessing.Queue()
-        self.customServiceOrchestrator = self.actionQueue.customServiceOrchestrator
-        self.mustDieEvent.clear()
-        self.timedOutEvent.clear()
+# TODO: make a reliable MultiProcessStatusCommandsExecutor implementation
+MultiProcessStatusCommandsExecutor = SingleProcessStatusCommandsExecutor
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
index ef144bb..8ce4405 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
@@ -55,7 +55,10 @@
 
 WebResponse = namedtuple('WebResponse', 'status_code time_millis error_msg')
 
-ensure_ssl_using_protocol(AmbariConfig.get_resolved_config().get_force_https_protocol())
+ensure_ssl_using_protocol(
+    AmbariConfig.get_resolved_config().get_force_https_protocol_name(),
+    AmbariConfig.get_resolved_config().get_ca_cert_file_path()
+)
 
 class WebAlert(BaseAlert):
 
diff --git a/ambari-agent/src/main/python/ambari_agent/hostname.py b/ambari-agent/src/main/python/ambari_agent/hostname.py
index 0f5f069..357c6b0 100644
--- a/ambari-agent/src/main/python/ambari_agent/hostname.py
+++ b/ambari-agent/src/main/python/ambari_agent/hostname.py
@@ -23,6 +23,7 @@
 import urllib2
 import logging
 import traceback
+import sys
 
 logger = logging.getLogger(__name__)
 
@@ -52,12 +53,19 @@
       out, err = osStat.communicate()
       if (0 == osStat.returncode and 0 != len(out.strip())):
         cached_hostname = out.strip()
+        logger.info("Read hostname '{0}' using agent:hostname_script '{1}'".format(cached_hostname, scriptname))
       else:
+        logger.warn("Execution of '{0}' failed with exit code {1}. err='{2}'\nout='{3}'".format(scriptname, osStat.returncode, err.strip(), out.strip()))
         cached_hostname = socket.getfqdn()
+        logger.info("Read hostname '{0}' using socket.getfqdn() as '{1}' failed".format(cached_hostname, scriptname))
     except:
       cached_hostname = socket.getfqdn()
+      logger.warn("Unexpected error while retrieving hostname: '{0}', defaulting to socket.getfqdn()".format(sys.exc_info()))
+      logger.info("Read hostname '{0}' using socket.getfqdn().".format(cached_hostname))
   except:
     cached_hostname = socket.getfqdn()
+    logger.info("agent:hostname_script configuration not defined thus read hostname '{0}' using socket.getfqdn().".format(cached_hostname))
+
   cached_hostname = cached_hostname.lower()
   return cached_hostname
 
diff --git a/ambari-agent/src/packages/tarball/all.xml b/ambari-agent/src/packages/tarball/all.xml
index 363941a..c71ffe9 100644
--- a/ambari-agent/src/packages/tarball/all.xml
+++ b/ambari-agent/src/packages/tarball/all.xml
@@ -44,21 +44,25 @@
   <fileSets>
     <fileSet>
       <directoryMode>755</directoryMode>
+      <fileMode>755</fileMode>
       <directory>src/main/python/ambari_agent</directory>
       <outputDirectory>${agent.install.dir}</outputDirectory>
     </fileSet>
     <fileSet>
       <directoryMode>755</directoryMode>
+      <fileMode>755</fileMode>
       <directory>${project.basedir}/../ambari-common/src/main/python/ambari_commons</directory>
       <outputDirectory>${ambari_commons.install.dir}</outputDirectory>
     </fileSet>
     <fileSet>
       <directoryMode>755</directoryMode>
+      <fileMode>755</fileMode>
       <directory>${resourceManagementSrcLocation}</directory>
       <outputDirectory>${resource_management.install.dir}</outputDirectory>
     </fileSet>
     <fileSet>
       <directoryMode>755</directoryMode>
+      <fileMode>755</fileMode>
       <directory>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</directory>
       <outputDirectory>${jinja.install.dir}</outputDirectory>
       <excludes>
@@ -67,11 +71,13 @@
     </fileSet>
     <fileSet>
       <directoryMode>755</directoryMode>
+      <fileMode>755</fileMode>
       <directory>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</directory>
       <outputDirectory>${simplejson.install.dir}</outputDirectory>
     </fileSet>
     <fileSet>
       <directoryMode>755</directoryMode>
+      <fileMode>755</fileMode>
       <directory>src/examples</directory>
       <outputDirectory>${lib.dir}/examples</outputDirectory>
     </fileSet>
@@ -92,7 +98,8 @@
 	  </excludes>
     </fileSet>
     <fileSet>
-      <directoryMode>755</directoryMode>
+      <directoryMode>700</directoryMode>
+      <fileMode>700</fileMode>
       <directory>${empty.dir}</directory>
       <outputDirectory>/var/lib/${project.artifactId}/keys</outputDirectory>
 	  <excludes>
@@ -117,11 +124,13 @@
     </fileSet>
     <fileSet>
       <directoryMode>755</directoryMode>
+      <fileMode>755</fileMode>
       <directory>${target.cache.dir}</directory>
       <outputDirectory>/var/lib/ambari-agent/cache</outputDirectory>
     </fileSet>
     <fileSet>
       <directoryMode>755</directoryMode>
+      <fileMode>755</fileMode>
       <directory>${pluggableStackDefinitionOutput}/custom_actions</directory>
       <outputDirectory>/var/lib/ambari-agent/cache/custom_actions</outputDirectory>
     </fileSet>
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
index ab46f96..faa9b81 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
@@ -988,12 +988,11 @@
 
     dummy_controller.recovery_manager = RecoveryManager(tempfile.mktemp())
 
-    result = (self.status_command, {'exitcode': 0 }, 'UNKNOWN')
+    result = (self.status_command, {'exitcode': 0 })
 
     actionQueue.process_status_command_result(result)
     report = actionQueue.result()
-    expected = {'dummy report': '',
-                'securityState' : 'UNKNOWN'}
+    expected = {'dummy report': ''}
 
     self.assertEqual(len(report['componentStatus']), 1)
     self.assertEqual(report['componentStatus'][0], expected)
@@ -1019,12 +1018,11 @@
 
     dummy_controller.recovery_manager = RecoveryManager(tempfile.mktemp(), True, False)
 
-    result = (self.status_command, {'exitcode': 0 }, 'UNKNOWN')
+    result = (self.status_command, {'exitcode': 0 })
 
     actionQueue.process_status_command_result(result)
     report = actionQueue.result()
     expected = {'dummy report': '',
-                'securityState' : 'UNKNOWN',
                 'sendExecCmdDet': 'True'}
 
     self.assertEqual(len(report['componentStatus']), 1)
@@ -1033,12 +1031,11 @@
     requires_recovery_mock.return_value = True
     command_exists_mock.return_value = True
     
-    result = (self.status_command, {'exitcode': 0 }, 'UNKNOWN')
+    result = (self.status_command, {'exitcode': 0 })
 
     actionQueue.process_status_command_result(result)
     report = actionQueue.result()
     expected = {'dummy report': '',
-                'securityState' : 'UNKNOWN',
                 'sendExecCmdDet': 'False'}
 
     self.assertEqual(len(report['componentStatus']), 1)
@@ -1062,7 +1059,7 @@
       'structuredOut': {'alerts': [ {'name': 'flume_alert'} ] }
     }
     
-    result = (self.status_command_for_alerts, command_return_value, command_return_value)
+    result = (self.status_command_for_alerts, command_return_value)
     
     build_mock.return_value = {'somestatusresult': 'aresult'}
 
diff --git a/ambari-agent/src/test/python/ambari_agent/TestController.py b/ambari-agent/src/test/python/ambari_agent/TestController.py
index 7f5d451..20da81f 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestController.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestController.py
@@ -416,6 +416,20 @@
                         exceptionMessage, str(e))
 
 
+  def test_getVersion(self):
+    self.controller.version = "1.2.3.4_MyAgent"
+    version = self.controller.get_version()
+    self.assertEquals('1.2.3.4', version)
+    self.controller.version = "1.2.3-MyAgent"
+    version = self.controller.get_version()
+    self.assertEquals('1.2.3', version)
+    self.controller.version = "11.2.3-MyAgent"
+    version = self.controller.get_version()
+    self.assertEquals('11.2.3', version)
+    self.controller.version = "11.2.13.10_MyAgent"
+    version = self.controller.get_version()
+    self.assertEquals('11.2.13.10', version)
+
   @patch.object(ExitHelper, "exit")
   @patch.object(threading._Event, "wait")
   @patch("time.sleep")
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
index 8e5e9a3..c54ffca 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
@@ -569,57 +569,6 @@
     status = orchestrator.requestComponentStatus(status_command)
     self.assertEqual(runCommand_mock.return_value, status)
 
-  @patch.object(CustomServiceOrchestrator, "runCommand")
-  @patch.object(FileCache, "__init__")
-  def test_requestComponentSecurityState(self, FileCache_mock, runCommand_mock):
-    FileCache_mock.return_value = None
-    status_command = {
-      "serviceName" : 'HDFS',
-      "commandType" : "STATUS_COMMAND",
-      "clusterName" : "",
-      "componentName" : "DATANODE",
-      'configurations':{}
-    }
-    dummy_controller = MagicMock()
-    orchestrator = CustomServiceOrchestrator(self.config, dummy_controller)
-    # Test securityState
-    runCommand_mock.return_value = {
-      'exitcode' : 0,
-      'structuredOut' : {'securityState': 'UNSECURED'}
-    }
-
-    status = orchestrator.requestComponentSecurityState(status_command)
-    self.assertEqual('UNSECURED', status)
-
-    # Test case where exit code indicates failure
-    runCommand_mock.return_value = {
-      "exitcode" : 1
-    }
-    status = orchestrator.requestComponentSecurityState(status_command)
-    self.assertEqual('UNKNOWN', status)
-
-  @patch.object(FileCache, "__init__")
-  def test_requestComponentSecurityState_realFailure(self, FileCache_mock):
-    '''
-    Tests the case where the CustomServiceOrchestrator attempts to call a service's security_status
-    method, but fails to do so because the script or method was not found.
-    :param FileCache_mock:
-    :return:
-    '''
-    FileCache_mock.return_value = None
-    status_command = {
-      "serviceName" : 'BOGUS_SERVICE',
-      "commandType" : "STATUS_COMMAND",
-      "clusterName" : "",
-      "componentName" : "DATANODE",
-      'configurations':{}
-    }
-    dummy_controller = MagicMock()
-    orchestrator = CustomServiceOrchestrator(self.config, dummy_controller)
-
-    status = orchestrator.requestComponentSecurityState(status_command)
-    self.assertEqual('UNKNOWN', status)
-
 
   @patch.object(CustomServiceOrchestrator, "get_py_executor")
   @patch.object(CustomServiceOrchestrator, "dump_command_to_json")
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHardware.py b/ambari-agent/src/test/python/ambari_agent/TestHardware.py
index d30020c..5400e26 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestHardware.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestHardware.py
@@ -226,8 +226,10 @@
   @patch.object(FacterLinux, "setMemInfoOutput")
   @patch.object(OSCheck, "get_os_type")
   @patch.object(OSCheck, "get_os_version")
-  def test_facterMemInfoOutput(self, get_os_version_mock, get_os_type_mock, facter_setMemInfoOutput_mock):
+  @patch.object(FacterLinux, "getSystemResourceOverrides")
+  def test_facterMemInfoOutput(self, getSystemResourceOverridesMock, get_os_version_mock, get_os_type_mock, facter_setMemInfoOutput_mock):
 
+    getSystemResourceOverridesMock.return_value = {}
     facter_setMemInfoOutput_mock.return_value = '''
 MemTotal:        1832392 kB
 MemFree:          868648 kB
diff --git a/ambari-agent/src/test/python/ambari_agent/TestShell.py b/ambari-agent/src/test/python/ambari_agent/TestShell.py
index 5dc1899..47923bd 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestShell.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestShell.py
@@ -63,7 +63,7 @@
       test_process = subprocess.Popen(test_cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)
       time.sleep(0.3) # Delay to allow subprocess to start
       # Check if processes are running
-      ps_cmd = """ps aux """
+      ps_cmd = """ps auxww """
       ps_process = subprocess.Popen(ps_cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)
       (out, err) = ps_process.communicate()
       self.assertTrue(sleep_cmd in out)
diff --git a/ambari-agent/src/test/python/resource_management/TestScript.py b/ambari-agent/src/test/python/resource_management/TestScript.py
index 65f8c2d..d531314 100644
--- a/ambari-agent/src/test/python/resource_management/TestScript.py
+++ b/ambari-agent/src/test/python/resource_management/TestScript.py
@@ -17,27 +17,15 @@
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
-import ConfigParser
-import os
-
-import pprint
-
-from unittest import TestCase
-import threading
-import tempfile
-import time
-from threading import Thread
-
-
 import StringIO
-import sys, logging, pprint
-from ambari_agent import AgentException
+import sys, pprint
 from resource_management.libraries.script import Script
 from resource_management.core.environment import Environment
-from mock.mock import MagicMock, patch
+from mock.mock import patch
+from stacks.utils.RMFTestCase import *
+import logging
 
-
-class TestScript(TestCase):
+class TestScript(RMFTestCase):
 
   def setUp(self):
     # disable stdout
@@ -116,6 +104,40 @@
     self.assertEqual(open_mock.call_count, 3)
     self.assertEqual(Script.structuredOut, {"1": "3", "2": "2"})
 
+  @patch("__builtin__.open")
+  def test_status_commands_clear_structured_out(self, open_mock):
+    """
+    Tests that status commands will clear any stored structured output from prior status commands.
+    :param open_mock: 
+    :return: 
+    """
+    class MagicFile(object):
+      def read(self):
+        return "{}"
+
+      def write(self, data):
+        pass
+
+      def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+      def __enter__(self):
+        return self
+
+    sys.argv = ["", "status", "foo.py", "", "", "INFO", ""]
+    open_mock.side_effect = [MagicFile()]
+
+    try:
+      with Environment(".", test_mode=True) as env:
+        script = Script()
+        Script.structuredOut = { "version" : "old_version" }
+        script.execute()
+    except:
+      pass
+
+    self.assertTrue(open_mock.called)
+    self.assertEquals({}, Script.structuredOut)
+
   def tearDown(self):
     # enable stdout
     sys.stdout = sys.__stdout__
diff --git a/ambari-common/src/main/python/ambari_commons/inet_utils.py b/ambari-common/src/main/python/ambari_commons/inet_utils.py
index d44107d..66f6544 100644
--- a/ambari-common/src/main/python/ambari_commons/inet_utils.py
+++ b/ambari-common/src/main/python/ambari_commons/inet_utils.py
@@ -183,23 +183,42 @@
       return '127.0.0.1'
   return address
 
-def ensure_ssl_using_protocol(protocol):
+def ensure_ssl_using_protocol(protocol="PROTOCOL_TLSv1", ca_certs=None):
   """
   Monkey patching ssl module to force it use tls_v1. Do this in common module to avoid problems with
   PythonReflectiveExecutor.
+
   :param protocol: one of ("PROTOCOL_SSLv2", "PROTOCOL_SSLv3", "PROTOCOL_SSLv23", "PROTOCOL_TLSv1", "PROTOCOL_TLSv1_1", "PROTOCOL_TLSv1_2")
+  :param ca_certs: path to ca_certs file
   :return:
   """
   from functools import wraps
   import ssl
-  if hasattr(ssl.wrap_socket, "_ambari_patched"):
-    return # do not create chain of wrappers, patch only once
-  def sslwrap(func):
-    @wraps(func)
-    def bar(*args, **kw):
-      import ssl
-      kw['ssl_version'] = getattr(ssl, protocol)
-      return func(*args, **kw)
-    bar._ambari_patched = True
-    return bar
-  ssl.wrap_socket = sslwrap(ssl.wrap_socket)
+
+  if not hasattr(ssl.wrap_socket, "_ambari_patched"):
+    def sslwrap(func):
+      @wraps(func)
+      def bar(*args, **kw):
+        import ssl
+        kw['ssl_version'] = getattr(ssl, protocol)
+        if ca_certs and not 'ca_certs' in kw:
+          kw['ca_certs'] = ca_certs
+          kw['cert_reqs'] = ssl.CERT_REQUIRED
+        return func(*args, **kw)
+      bar._ambari_patched = True
+      return bar
+    ssl.wrap_socket = sslwrap(ssl.wrap_socket)
+
+  # python 2.7 stuff goes here
+  if hasattr(ssl, "_create_default_https_context"):
+    if not hasattr(ssl._create_default_https_context, "_ambari_patched"):
+      @wraps(ssl._create_default_https_context)
+      def _create_default_https_context_patched():
+        context = ssl.SSLContext(protocol = getattr(ssl, protocol))
+        if ca_certs:
+          context.load_verify_locations(ca_certs)
+          context.verify_mode = ssl.CERT_REQUIRED
+          context.check_hostname = False
+        return context
+      _create_default_https_context_patched._ambari_patched = True
+      ssl._create_default_https_context = _create_default_https_context_patched
diff --git a/ambari-common/src/main/python/ambari_commons/network.py b/ambari-common/src/main/python/ambari_commons/network.py
index 4c589f3..edb9add 100644
--- a/ambari-common/src/main/python/ambari_commons/network.py
+++ b/ambari-common/src/main/python/ambari_commons/network.py
@@ -42,30 +42,20 @@
     self.sock = ssl.wrap_socket(conn_socket, self.key_file, self.cert_file,
                                 ssl_version=self.ssl_version)
 
-def get_http_connection(host, port, https_enabled=False, ca_certs=None):
+def get_http_connection(host, port, https_enabled=False, ca_certs=None, ssl_version = ssl.PROTOCOL_SSLv23):
   if https_enabled:
-    ssl_version = ssl.PROTOCOL_SSLv23
     if ca_certs:
-      ssl_version = check_ssl_certificate_and_return_ssl_version(host, port, ca_certs)
+      check_ssl_certificate_and_return_ssl_version(host, port, ca_certs, ssl_version)
     return HTTPSConnectionWithCustomSslVersion(host, port, ssl_version)
   else:
     return httplib.HTTPConnection(host, port)
 
-def check_ssl_certificate_and_return_ssl_version(host, port, ca_certs):
+def check_ssl_certificate_and_return_ssl_version(host, port, ca_certs, ssl_version = ssl.PROTOCOL_SSLv23):
   try:
-    # Try with TLSv1 first.
-    ssl_version = ssl.PROTOCOL_TLSv1
     ssl.get_server_certificate((host, port), ssl_version=ssl_version, ca_certs=ca_certs)
   except ssl.SSLError as ssl_error:
-    print_warning_msg("Failed to verify the SSL certificate for https://{0}:{1} with CA certificate in {2} using ssl.PROTOCOL_TLSv1."
-                      " Trying to use less secure ssl.PROTOCOL_SSLv23. Error : {3}".format(host, port, ca_certs, str(ssl_error)))
-    try:
-      # Try with SSLv23 only if TLSv1 failed.
-      ssl_version = ssl.PROTOCOL_SSLv23
-      ssl.get_server_certificate((host, port), ssl_version=ssl_version, ca_certs=ca_certs)
-    except ssl.SSLError as ssl_error:
-      raise Fail("Failed to verify the SSL certificate for https://{0}:{1} with CA certificate in {2}. Error : {3}"
-               .format(host, port, ca_certs, str(ssl_error)))
+    raise Fail("Failed to verify the SSL certificate for https://{0}:{1} with CA certificate in {2}. Error : {3}"
+             .format(host, port, ca_certs, str(ssl_error)))
   return ssl_version
 
 
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index ce00f0c..facf186 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -356,11 +356,16 @@
               then the Atlas RPM will not be able to copy its artifacts into /etc/atlas/conf directory and therefore
               prevent Ambari from by copying those unmanaged contents into /etc/atlas/$version/0
               '''
-              parent_dir = os.path.dirname(current_dir)
-              if os.path.exists(parent_dir):
-                Link(conf_dir, to=current_dir)
+              component_list = default("/localComponents", [])
+              if "ATLAS_SERVER" in component_list or "ATLAS_CLIENT" in component_list:
+                Logger.info("Atlas is installed on this host.")
+                parent_dir = os.path.dirname(current_dir)
+                if os.path.exists(parent_dir):
+                  Link(conf_dir, to=current_dir)
+                else:
+                  Logger.info("Will not create symlink from {0} to {1} because the destination's parent dir does not exist.".format(conf_dir, current_dir))
               else:
-                Logger.info("Will not create symlink from {0} to {1} because the destination's parent dir does not exist.".format(conf_dir, current_dir))
+                Logger.info("Will not create symlink from {0} to {1} because Atlas is not installed on this host.".format(conf_dir, current_dir))
             else:
               # Normal path for other packages
               Link(conf_dir, to=current_dir)
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py b/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
index 557db58..72bc5c6 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
@@ -62,6 +62,7 @@
 def curl_krb_request(tmp_dir, keytab, principal, url, cache_file_prefix,
     krb_exec_search_paths, return_only_http_code, caller_label, user,
     connection_timeout = CONNECTION_TIMEOUT_DEFAULT,
+    ca_certs = None,
     kinit_timer_ms=DEFAULT_KERBEROS_KINIT_TIMER_MS, method = '',body='',header=''):
   """
   Makes a curl request using the kerberos credentials stored in a calculated cache file. The
@@ -84,13 +85,20 @@
   :param caller_label: an identifier to give context into the caller of this module (used for logging)
   :param user: the user to invoke the curl command as
   :param connection_timeout: if specified, a connection timeout for curl (default 10 seconds)
+  :param ca_certs: path to certificates
   :param kinit_timer_ms: if specified, the time (in ms), before forcing a kinit even if the
                          klist cache is still valid.
   :return:
   """
 
   import uuid
-
+  # Backward compatibility with old code, management packs, etc. All new code needs to pass ca_certs explicitly.
+  if ca_certs is None:
+    try:
+      from ambari_agent.AmbariConfig import AmbariConfig
+      ca_certs = AmbariConfig.get_resolved_config().get_ca_cert_file_path()
+    except:
+      pass
   # start off false
   is_kinit_required = False
 
@@ -174,13 +182,16 @@
   connection_timeout = int(connection_timeout)
   maximum_timeout = connection_timeout + 2
 
+  ssl_options = ['-k']
+  if ca_certs:
+    ssl_options = ['--cacert', ca_certs]
   try:
     if return_only_http_code:
-      _, curl_stdout, curl_stderr = get_user_call_output(['curl', '--location-trusted', '-k', '--negotiate', '-u', ':', '-b', cookie_file, '-c', cookie_file, '-w',
+      _, curl_stdout, curl_stderr = get_user_call_output(['curl', '--location-trusted'] + ssl_options + ['--negotiate', '-u', ':', '-b', cookie_file, '-c', cookie_file, '-w',
                              '%{http_code}', url, '--connect-timeout', str(connection_timeout), '--max-time', str(maximum_timeout), '-o', '/dev/null'],
                              user=user, env=kerberos_env)
     else:
-      curl_command = ['curl', '--location-trusted', '-k', '--negotiate', '-u', ':', '-b', cookie_file, '-c', cookie_file,
+      curl_command = ['curl', '--location-trusted'] + ssl_options + ['--negotiate', '-u', ':', '-b', cookie_file, '-c', cookie_file,
                       url, '--connect-timeout', str(connection_timeout), '--max-time', str(maximum_timeout)]
       # returns response body
       if len(method) > 0 and len(body) == 0 and len(header) == 0:
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/decorator.py b/ambari-common/src/main/python/resource_management/libraries/functions/decorator.py
index 55cf335..b5b804d 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/decorator.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/decorator.py
@@ -26,13 +26,15 @@
 from resource_management.core.logger import Logger
 
 
-def retry(times=3, sleep_time=1, max_sleep_time=8, backoff_factor=1, err_class=Exception):
+def retry(times=3, sleep_time=1, max_sleep_time=8, backoff_factor=1, err_class=Exception, timeout_func=None):
   """
   Retry decorator for improved robustness of functions.
-  :param times: Number of times to attempt to call the function.
+  :param times: Number of times to attempt to call the function.  Optionally specify the timeout_func.
   :param sleep_time: Initial sleep time between attempts
   :param backoff_factor: After every failed attempt, multiple the previous sleep time by this factor.
   :param err_class: Exception class to handle
+  :param timeout_func: used when the 'times' argument should be computed. This function should
+         return an integer value that indicates the number of seconds to wait
   :return: Returns the output of the wrapped function.
   """
   def decorator(function):
@@ -42,6 +44,10 @@
       _backoff_factor = backoff_factor
       _err_class = err_class
 
+      if timeout_func is not None:
+        timeout = timeout_func()
+        _times = timeout // sleep_time  # ensure we end up with an integer
+
       while _times > 1:
         _times -= 1
         try:
@@ -49,7 +55,8 @@
         except _err_class, err:
           Logger.info("Will retry %d time(s), caught exception: %s. Sleeping for %d sec(s)" % (_times, str(err), _sleep_time))
           time.sleep(_sleep_time)
-        if(_sleep_time * _backoff_factor <= max_sleep_time):
+
+        if _sleep_time * _backoff_factor <= max_sleep_time:
           _sleep_time *= _backoff_factor
 
       return function(*args, **kwargs)
@@ -57,15 +64,17 @@
   return decorator
 
 
-def safe_retry(times=3, sleep_time=1, max_sleep_time=8, backoff_factor=1, err_class=Exception, return_on_fail=None):
+def safe_retry(times=3, sleep_time=1, max_sleep_time=8, backoff_factor=1, err_class=Exception, return_on_fail=None, timeout_func=None):
   """
   Retry decorator for improved robustness of functions. Instead of error generation on the last try, will return
   return_on_fail value.
-  :param times: Number of times to attempt to call the function.
+  :param times: Number of times to attempt to call the function.  Optionally specify the timeout_func.
   :param sleep_time: Initial sleep time between attempts
   :param backoff_factor: After every failed attempt, multiple the previous sleep time by this factor.
   :param err_class: Exception class to handle
   :param return_on_fail value to return on the last try
+  :param timeout_func: used when the 'times' argument should be computed. This function should
+         return an integer value that indicates the number of seconds to wait
   :return: Returns the output of the wrapped function.
   """
   def decorator(function):
@@ -76,6 +85,10 @@
       _err_class = err_class
       _return_on_fail = return_on_fail
 
+      if timeout_func is not None:
+        timeout = timeout_func()
+        _times = timeout // sleep_time  # ensure we end up with an integer
+
       while _times > 1:
         _times -= 1
         try:
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/jmx.py b/ambari-common/src/main/python/resource_management/libraries/functions/jmx.py
index 9a4ff5f..dbd0092 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/jmx.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/jmx.py
@@ -23,7 +23,7 @@
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions.get_user_call_output import get_user_call_output
 
-def get_value_from_jmx(qry, property, security_enabled, run_user, is_https_enabled):
+def get_value_from_jmx(qry, property, security_enabled, run_user, is_https_enabled, last_retry=True):
   try:
     if security_enabled:
       cmd = ['curl', '--negotiate', '-u', ':', '-s']
@@ -41,5 +41,6 @@
       data_dict = json.loads(data)
       return data_dict["beans"][0][property]
   except:
-    Logger.logger.exception("Getting jmx metrics from NN failed. URL: " + str(qry))
-    return None
\ No newline at end of file
+    if last_retry:
+      Logger.logger.exception("Getting jmx metrics from NN failed. URL: " + str(qry))
+    return None
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py b/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
index 665a8e4..8a2ff25 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
@@ -51,7 +51,7 @@
   @retry(times=times, sleep_time=sleep_time, backoff_factor=backoff_factor, err_class=Fail)
   def doRetries(hdfs_site, security_enabled, run_user):
     doRetries.attempt += 1
-    active_namenodes, standby_namenodes, unknown_namenodes = get_namenode_states_noretries(hdfs_site, security_enabled, run_user)
+    active_namenodes, standby_namenodes, unknown_namenodes = get_namenode_states_noretries(hdfs_site, security_enabled, run_user, doRetries.attempt == times)
     Logger.info(
       "NameNode HA states: active_namenodes = {0}, standby_namenodes = {1}, unknown_namenodes = {2}".format(
         active_namenodes, standby_namenodes, unknown_namenodes))
@@ -65,7 +65,7 @@
   doRetries.attempt = 0
   return doRetries(hdfs_site, security_enabled, run_user)
 
-def get_namenode_states_noretries(hdfs_site, security_enabled, run_user):
+def get_namenode_states_noretries(hdfs_site, security_enabled, run_user, last_retry=True):
   """
   return format [('nn1', 'hdfs://hostname1:port1'), ('nn2', 'hdfs://hostname2:port2')] , [....], [....]
   """
@@ -102,7 +102,7 @@
 
       jmx_uri = JMX_URI_FRAGMENT.format(protocol, value)
       
-      state = get_value_from_jmx(jmx_uri, 'tag.HAState', security_enabled, run_user, is_https_enabled)
+      state = get_value_from_jmx(jmx_uri, 'tag.HAState', security_enabled, run_user, is_https_enabled, last_retry)
       # If JMX parsing failed
       if not state:
         check_service_cmd = "hdfs haadmin -ns {0} -getServiceState {1}".format(get_nameservice(hdfs_site), nn_unique_id)
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py b/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
index c510dac..c80c577 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
@@ -49,7 +49,7 @@
                         xa_audit_db_password, ssl_truststore_password,
                         ssl_keystore_password, api_version=None, stack_version_override = None, skip_if_rangeradmin_down = True,
                         is_security_enabled = False, is_stack_supports_ranger_kerberos = False,
-                        component_user_principal = None, component_user_keytab = None):
+                        component_user_principal = None, component_user_keytab = None, cred_lib_path_override = None, cred_setup_prefix_override = None):
 
   if audit_db_is_enabled and component_driver_curl_source is not None and not component_driver_curl_source.endswith("/None"):
     if previous_jdbc_jar and os.path.isfile(previous_jdbc_jar):
@@ -69,8 +69,10 @@
 
   if policymgr_mgr_url.endswith('/'):
     policymgr_mgr_url = policymgr_mgr_url.rstrip('/')
-  stack_version = get_stack_version(component_select_name)
-  if stack_version_override is not None:
+
+  if stack_version_override is None:
+    stack_version = get_stack_version(component_select_name)
+  else:
     stack_version = stack_version_override
 
   component_conf_dir = conf_dict
@@ -187,7 +189,7 @@
 
     setup_ranger_plugin_keystore(service_name, audit_db_is_enabled, stack_version, credential_file,
               xa_audit_db_password, ssl_truststore_password, ssl_keystore_password,
-              component_user, component_group, java_home)
+              component_user, component_group, java_home, cred_lib_path_override, cred_setup_prefix_override)
 
   else:
     File(format('{component_conf_dir}/ranger-security.xml'),
@@ -207,16 +209,20 @@
       sudo=True)
 
 def setup_ranger_plugin_keystore(service_name, audit_db_is_enabled, stack_version, credential_file, xa_audit_db_password,
-                                ssl_truststore_password, ssl_keystore_password, component_user, component_group, java_home):
+                                ssl_truststore_password, ssl_keystore_password, component_user, component_group, java_home, cred_lib_path_override = None, cred_setup_prefix_override = None):
 
   stack_root = Script.get_stack_root()
   service_name = str(service_name).lower()
-  cred_lib_path = format('{stack_root}/{stack_version}/ranger-{service_name}-plugin/install/lib/*')
-  cred_setup_prefix = (format('{stack_root}/{stack_version}/ranger-{service_name}-plugin/ranger_credential_helper.py'), '-l', cred_lib_path)
 
-  if service_name == 'nifi':
-    cred_lib_path = format('{stack_root}/{stack_version}/{service_name}/ext/ranger/install/lib/*')
-    cred_setup_prefix = (format('{stack_root}/{stack_version}/{service_name}/ext/ranger/scripts/ranger_credential_helper.py'), '-l', cred_lib_path)
+  if cred_lib_path_override is not None:
+    cred_lib_path = cred_lib_path_override
+  else:
+    cred_lib_path = format('{stack_root}/{stack_version}/ranger-{service_name}-plugin/install/lib/*')
+
+  if cred_setup_prefix_override is not None:
+    cred_setup_prefix = cred_setup_prefix_override
+  else:
+    cred_setup_prefix = (format('{stack_root}/{stack_version}/ranger-{service_name}-plugin/ranger_credential_helper.py'), '-l', cred_lib_path)
 
   if audit_db_is_enabled:
     cred_setup = cred_setup_prefix + ('-f', credential_file, '-k', 'auditDBCred', '-v', PasswordString(xa_audit_db_password), '-c', '1')
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py b/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py
index ff00a1f..79dc874 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py
@@ -78,6 +78,46 @@
 
   return version
 
+def get_component_version_with_stack_selector(stack_selector_path, component_name):
+  """
+   For specific cases where we deal with HDP add-on services from a management pack, the version
+   needs to be determined by using the specific stack selector itself.
+   :param stack_selector_path: path to the stack selector tool, e.g., /usr/bin/hdf-select.
+   Provided by the service that calls this function.
+   :param component_name: Component name as a string necessary to get the version
+   :return: Returns a string if found, e.g., 2.2.1.0-2175; otherwise returns None
+   This function can be called by custom services, hence should not be removed
+  """
+  version = None
+  out = None
+  code = -1
+  if not stack_selector_path:
+    Logger.error("Stack selector path not provided")
+  elif not os.path.exists(stack_selector_path):
+    Logger.error("Stack selector path does not exist")
+  elif not component_name:
+    Logger.error("Component name not provided")
+  else:
+    tmpfile = tempfile.NamedTemporaryFile()
+
+    get_stack_comp_version_cmd = ""
+    try:
+      # This is necessary because Ubuntu returns "stdin: is not a tty", see AMBARI-8088
+      with open(tmpfile.name, 'r') as file:
+        get_stack_comp_version_cmd = '{0} status {1} > {2}' .format(stack_selector_path, component_name, tmpfile.name)
+        code, stdoutdata = shell.call(get_stack_comp_version_cmd, quiet=True)
+        out = file.read()
+
+      if code != 0 or out is None:
+        raise Exception("Code is nonzero or output is empty")
+
+      Logger.debug("Command: %s\nOutput: %s" % (get_stack_comp_version_cmd, str(out)))
+      matches = re.findall(r"([\d\.]+\-\d+)", out)
+      version = matches[0] if matches and len(matches) > 0 else None
+    except Exception, e:
+      Logger.error("Could not determine stack version for component %s by calling '%s'. Return Code: %s, Output: %s." %
+                   (component_name, get_stack_comp_version_cmd, str(code), str(out)))
+  return version
 
 def get_versions_from_stack_root(stack_root):
   """
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index 5fa9ec4..bcad6c3 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -28,6 +28,7 @@
 import platform
 import inspect
 import tarfile
+import time
 from optparse import OptionParser
 import resource_management
 from ambari_commons import OSCheck, OSConst
@@ -35,6 +36,7 @@
 from ambari_commons.constants import UPGRADE_TYPE_ROLLING
 from ambari_commons.constants import UPGRADE_TYPE_HOST_ORDERED
 from ambari_commons.network import reconfigure_urllib2_opener
+from ambari_commons.inet_utils import resolve_address, ensure_ssl_using_protocol
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from resource_management.libraries.resources import XmlConfig
 from resource_management.libraries.resources import PropertiesFile
@@ -150,6 +152,7 @@
   # Class variable
   tmp_dir = ""
   force_https_protocol = "PROTOCOL_TLSv1"
+  ca_cert_file_path = None
 
   def load_structured_out(self):
     Script.structuredOut = {}
@@ -179,7 +182,7 @@
         json.dump(Script.structuredOut, fp)
     except IOError, err:
       Script.structuredOut.update({"errMsg" : "Unable to write to " + self.stroutfile})
-      
+
   def get_component_name(self):
     """
     To be overridden by subclasses.
@@ -217,7 +220,7 @@
     """
     stack_name = Script.get_stack_name()
     component_name = self.get_component_name()
-    
+
     if component_name and stack_name:
       component_version = get_component_version(stack_name, component_name)
 
@@ -260,15 +263,15 @@
     parser.add_option("-o", "--out-files-logging", dest="log_out_files", action="store_true",
                       help="use this option to enable outputting *.out files of the service pre-start")
     (self.options, args) = parser.parse_args()
-    
+
     self.log_out_files = self.options.log_out_files
-    
+
     # parse arguments
     if len(args) < 6:
      print "Script expects at least 6 arguments"
      print USAGE.format(os.path.basename(sys.argv[0])) # print to stdout
      sys.exit(1)
-     
+
     self.command_name = str.lower(sys.argv[1])
     self.command_data_file = sys.argv[2]
     self.basedir = sys.argv[3]
@@ -276,9 +279,11 @@
     self.load_structured_out()
     self.logging_level = sys.argv[5]
     Script.tmp_dir = sys.argv[6]
-    # optional script argument for forcing https protocol
+    # optional script arguments for forcing https protocol and ca_certs file
     if len(sys.argv) >= 8:
       Script.force_https_protocol = sys.argv[7]
+    if len(sys.argv) >= 9:
+      Script.ca_cert_file_path = sys.argv[8]
 
     logging_level_str = logging._levelNames[self.logging_level]
     Logger.initialize_logger(__name__, logging_level=logging_level_str)
@@ -289,6 +294,16 @@
     if OSCheck.is_windows_family():
       reload_windows_env()
 
+    # !!! status commands re-use structured output files; if the status command doesn't update the
+    # the file (because it doesn't have to) then we must ensure that the file is reset to prevent
+    # old, stale structured output from a prior status command from being used
+    if self.command_name == "status":
+      Script.structuredOut = {}
+      self.put_structured_out({})
+
+    # make sure that script has forced https protocol and ca_certs file passed from agent
+    ensure_ssl_using_protocol(Script.get_force_https_protocol_name(), Script.get_ca_cert_file_path())
+
     try:
       with open(self.command_data_file) as f:
         pass
@@ -308,50 +323,68 @@
       method = self.choose_method_to_execute(self.command_name)
       with Environment(self.basedir, tmp_dir=Script.tmp_dir) as env:
         env.config.download_path = Script.tmp_dir
-        
-        if self.command_name == "start" and not self.is_hook():
-          self.pre_start()
-        
+
+        if not self.is_hook():
+          self.execute_prefix_function(self.command_name, 'pre', env)
+
         method(env)
 
-        if self.command_name == "start" and not self.is_hook():
-          self.post_start()
+        if not self.is_hook():
+          self.execute_prefix_function(self.command_name, 'post', env)
+
     except Fail as ex:
       ex.pre_raise()
       raise
     finally:
       if self.should_expose_component_version(self.command_name):
         self.save_component_version_to_structured_out()
-        
+
+  def execute_prefix_function(self, command_name, afix, env):
+    """
+    Execute action afix (prefix or suffix) based on command_name and afix type
+    Example: command_name=start, afix=pre results in execution of self.pre_start(env) if it exists.
+    """
+    self_methods = dir(self)
+    method_name = "{0}_{1}".format(afix, command_name)
+    if not method_name in self_methods:
+      Logger.logger.debug("Action afix '{0}' not present".format(method_name))
+      return
+    Logger.logger.debug("Execute action afix: {0}".format(method_name))
+    method = getattr(self, method_name)
+    method(env)
+
   def is_hook(self):
     from resource_management.libraries.script.hook import Hook
     return (Hook in self.__class__.__bases__)
-        
+
   def get_log_folder(self):
     return ""
-  
+
   def get_user(self):
     return ""
 
   def get_pid_files(self):
     return []
-        
-  def pre_start(self):
+
+  def pre_start(self, env=None):
+    """
+    Executed before any start method. Posts contents of relevant *.out files to command execution log.
+    """
     if self.log_out_files:
       log_folder = self.get_log_folder()
       user = self.get_user()
-      
+
       if log_folder == "":
         Logger.logger.warn("Log folder for current script is not defined")
         return
-      
+
       if user == "":
         Logger.logger.warn("User for current script is not defined")
         return
-      
+
       show_logs(log_folder, user, lines_count=COUNT_OF_LAST_LINES_OF_OUT_FILES_LOGGED, mask=OUT_FILES_MASK)
 
-  def post_start(self):
+  def post_start(self, env=None):
     pid_files = self.get_pid_files()
     if pid_files == []:
       Logger.logger.warning("Pid files for current script are not defined")
@@ -366,6 +399,32 @@
 
     Logger.info("Component has started with pid(s): {0}".format(', '.join(pids)))
 
+  def post_stop(self, env):
+    """
+    Executed after completion of every stop method. Waits until the component is actually stopped (the check is
+     performed using the component's status() method).
+    """
+    self_methods = dir(self)
+
+    if not 'status' in self_methods:
+      pass
+    status_method = getattr(self, 'status')
+    component_is_stopped = False
+    counter = 0
+    while not component_is_stopped :
+      try:
+        if counter % 100 == 0:
+          Logger.logger.info("Waiting for actual component stop")
+        status_method(env)
+        time.sleep(0.1)
+        counter += 1
+      except ComponentIsNotRunning, e:
+        Logger.logger.debug("'status' reports ComponentIsNotRunning")
+        component_is_stopped = True
+      except ClientComponentHasNoStatus, e:
+        Logger.logger.debug("Client component has no status")
+        component_is_stopped = True
+
   def choose_method_to_execute(self, command_name):
     """
     Returns a callable object that should be executed for a given command.
@@ -375,7 +434,7 @@
       raise Fail("Script '{0}' has no method '{1}'".format(sys.argv[0], command_name))
     method = getattr(self, command_name)
     return method
-  
+
   def get_stack_version_before_packages_installed(self):
     """
     This works in a lazy way (calculates the version first time and stores it). 
@@ -392,7 +451,7 @@
     if not Script.stack_version_from_distro_select and component_name:
       from resource_management.libraries.functions import stack_select
       Script.stack_version_from_distro_select = stack_select.get_stack_version_before_install(component_name)
-      
+
     # If <stack-selector-tool> has not yet been done (situations like first install),
     # we can use <stack-selector-tool> version itself.
     # Wildcards cause a lot of troubles with installing packages, if the version contains wildcards we should try to specify it.
@@ -403,7 +462,7 @@
               stack_tools.get_stack_tool_package(stack_tools.STACK_SELECTOR_NAME))
 
     return Script.stack_version_from_distro_select
-  
+
   def format_package_name(self, name):
     from resource_management.libraries.functions.default import default
     """
@@ -440,7 +499,7 @@
       stack_version_package_formatted = self.get_stack_version_before_packages_installed().replace('.', package_delimiter).replace('-', package_delimiter) if STACK_VERSION_PLACEHOLDER in name else name
 
     package_name = name.replace(STACK_VERSION_PLACEHOLDER, stack_version_package_formatted)
-    
+
     return package_name
 
   @staticmethod
@@ -465,10 +524,34 @@
     return Script.tmp_dir
 
   @staticmethod
-  def get_force_https_protocol():
+  def get_force_https_protocol_name():
+    """
+    Get forced https protocol name.
+
+    :return: protocol name, PROTOCOL_TLSv1 by default
+    """
     return Script.force_https_protocol
 
   @staticmethod
+  def get_force_https_protocol_value():
+    """
+    Get the forced https protocol value that corresponds to the ssl module variable.
+
+    :return: protocol value
+    """
+    import ssl
+    return getattr(ssl, Script.get_force_https_protocol_name())
+
+  @staticmethod
+  def get_ca_cert_file_path():
+    """
+    Get path to file with trusted certificates.
+
+    :return: trusted certificates file path
+    """
+    return Script.ca_cert_file_path
+
+  @staticmethod
   def get_component_from_role(role_directory_map, default_role):
     """
     Gets the <stack-root>/current/<component> component given an Ambari role,
@@ -639,13 +722,13 @@
                           hadoop_user, self.get_password(hadoop_user),
                           str(config['hostLevelParams']['stack_version']))
       reload_windows_env()
-      
+
   def check_package_condition(self, package):
     condition = package['condition']
-    
+
     if not condition:
       return True
-    
+
     return self.should_install_package(package)
 
   def should_install_package(self, package):
@@ -782,7 +865,7 @@
 
       # To remain backward compatible with older stacks, only pass upgrade_type if available.
       # TODO, remove checking the argspec for "upgrade_type" once all of the services support that optional param.
-      self.pre_start()
+      self.pre_start(env)
       if "upgrade_type" in inspect.getargspec(self.start).args:
         self.start(env, upgrade_type=upgrade_type)
       else:
@@ -790,7 +873,7 @@
           self.start(env, rolling_restart=(upgrade_type == UPGRADE_TYPE_ROLLING))
         else:
           self.start(env)
-      self.post_start()
+      self.post_start(env)
 
       if is_stack_upgrade:
         # Remain backward compatible with the rest of the services that haven't switched to using
@@ -819,22 +902,6 @@
     """
     self.fail_with_error('configure method isn\'t implemented')
 
-  def security_status(self, env):
-    """
-    To be overridden by subclasses to provide the current security state of the component.
-    Implementations are required to set the "securityState" property of the structured out data set
-    to one of the following values:
-
-      UNSECURED        - If the component is not configured for any security protocol such as
-                         Kerberos
-      SECURED_KERBEROS - If the component is configured for Kerberos
-      UNKNOWN          - If the security state cannot be determined
-      ERROR            - If the component is supposed to be secured, but there are issues with the
-                         configuration.  For example, if the component is configured for Kerberos
-                         but the configured principal and keytab file fail to kinit
-    """
-    self.put_structured_out({"securityState": "UNKNOWN"})
-
   def generate_configs_get_template_file_content(self, filename, dicts):
     config = self.get_config()
     content = ''
@@ -852,7 +919,7 @@
     config = self.get_config()
     return {'configurations':config['configurations'][dict],
             'configuration_attributes':config['configuration_attributes'][dict]}
-    
+
   def generate_configs_get_xml_file_dict(self, filename, dict):
     config = self.get_config()
     return config['configurations'][dict]
@@ -864,7 +931,7 @@
     """
     import params
     env.set_params(params)
-    
+
     config = self.get_config()
 
     xml_configs_list = config['commandParams']['xml_configs_list']
@@ -882,19 +949,19 @@
         for filename, dict in file_dict.iteritems():
           XmlConfig(filename,
                     conf_dir=conf_tmp_dir,
-                    mode=0600,
+                    mode=0644,
                     **self.generate_configs_get_xml_file_content(filename, dict)
           )
       for file_dict in env_configs_list:
         for filename,dicts in file_dict.iteritems():
           File(os.path.join(conf_tmp_dir, filename),
-               mode=0600,
+               mode=0644,
                content=InlineTemplate(self.generate_configs_get_template_file_content(filename, dicts)))
 
       for file_dict in properties_configs_list:
         for filename, dict in file_dict.iteritems():
           PropertiesFile(os.path.join(conf_tmp_dir, filename),
-                         mode=0600,
+                         mode=0644,
                          properties=self.generate_configs_get_xml_file_dict(filename, dict)
           )
       with closing(tarfile.open(output_filename, "w:gz")) as tar:
diff --git a/ambari-funtest/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/mysql_service.py b/ambari-funtest/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/mysql_service.py
index 4716343..cf1d30e 100644
--- a/ambari-funtest/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/mysql_service.py
+++ b/ambari-funtest/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/mysql_service.py
@@ -31,6 +31,11 @@
   elif action == 'status':
     cmd = format('service {daemon_name} status')
     logoutput = False
+    try:
+      Execute(cmd)
+      return
+    except:
+      raise ComponentIsNotRunning()
   else:
     cmd = None
 
diff --git a/ambari-infra/.gitignore b/ambari-infra/.gitignore
new file mode 100644
index 0000000..a7d91c4
--- /dev/null
+++ b/ambari-infra/.gitignore
@@ -0,0 +1,6 @@
+target
+.settings
+.classpath
+.project
+/bin/
+job-repository.db
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-assembly/pom.xml b/ambari-infra/ambari-infra-assembly/pom.xml
index 51e5804..fafef7e 100644
--- a/ambari-infra/ambari-infra-assembly/pom.xml
+++ b/ambari-infra/ambari-infra-assembly/pom.xml
@@ -38,6 +38,10 @@
     <solr.client.mapping.path>${mapping.base.path}/${solr.client.package.name}</solr.client.mapping.path>
     <solr.client.dir>${project.basedir}/../ambari-infra-solr-client</solr.client.dir>
     <infra.solr.plugin.dir>${project.basedir}/../ambari-infra-solr-plugin</infra.solr.plugin.dir>
+    <infra-manager.package.name>ambari-infra-manager</infra-manager.package.name>
+    <infra-manager.dir>${project.basedir}/../ambari-infra-manager</infra-manager.dir>
+    <infra-manager.mapping.path>${mapping.base.path}/${infra-manager.package.name}</infra-manager.mapping.path>
+    <infra-manager.conf.mapping.path>/etc/${infra-manager.package.name}/conf</infra-manager.conf.mapping.path>
   </properties>
 
   <profiles>
@@ -118,6 +122,45 @@
                   </mappings>
                 </configuration>
               </execution>
+              <execution>
+                <id>infra-manager</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>rpm</goal>
+                </goals>
+                <configuration>
+                  <group>Development</group>
+                  <name>${infra-manager.package.name}</name>
+                  <mappings>
+                    <mapping>
+                      <directory>${infra-manager.mapping.path}</directory>
+                      <sources>
+                        <source>
+                          <location>${infra-manager.dir}/target/package</location>
+                          <excludes>
+                            <exclude>log4j.xml</exclude>
+                            <exclude>infra-manager.properties</exclude>
+                            <exclude>infra-manager-env.sh</exclude>
+                          </excludes>
+                        </source>
+                      </sources>
+                    </mapping>
+                    <mapping>
+                      <directory>${infra-manager.conf.mapping.path}</directory>
+                      <sources>
+                        <source>
+                          <location>${infra-manager.dir}/target/package</location>
+                          <includes>
+                            <include>log4j.xml</include>
+                            <include>infra-manager.properties</include>
+                            <include>infra-manager-env.sh</include>
+                          </includes>
+                        </source>
+                      </sources>
+                    </mapping>
+                  </mappings>
+                </configuration>
+              </execution>
             </executions>
           </plugin>
           <plugin>
@@ -277,6 +320,49 @@
                   </dataSet>
                 </configuration>
               </execution>
+
+              <execution>
+                <phase>package</phase>
+                <id>jdeb-infra-manager</id>
+                <goals>
+                  <goal>jdeb</goal>
+                </goals>
+                <configuration>
+                  <controlDir>${basedir}/src/main/package/deb/manager</controlDir>
+                  <deb>${basedir}/target/${infra-manager.package.name}_${package-version}-${package-release}.deb</deb>
+                  <skip>false</skip>
+                  <skipPOMs>false</skipPOMs>
+                  <dataSet>
+                    <data>
+                      <src>${infra-manager.dir}/target/ambari-infra-manager.tar.gz</src>
+                      <type>archive</type>
+                      <mapper>
+                        <type>perm</type>
+                        <user>root</user>
+                        <group>root</group>
+                        <prefix>${infra-manager.mapping.path}</prefix>
+                      </mapper>
+                      <excludes>
+                        log4j.xml,infra-manager.properties,infra-manager-env.sh
+                      </excludes>
+                    </data>
+                    <data>
+                      <src>${infra-manager.dir}/target/package</src>
+                      <type>directory</type>
+                      <mapper>
+                        <prefix>${infra-manager.conf.mapping.path}</prefix>
+                        <type>perm</type>
+                        <user>root</user>
+                        <group>root</group>
+                        <filemode>644</filemode>
+                      </mapper>
+                      <includes>
+                        log4j.xml,infra-manager.properties,infra-manager-env.sh
+                      </includes>
+                    </data>
+                  </dataSet>
+                </configuration>
+              </execution>
             </executions>
           </plugin>
           <plugin>
@@ -330,6 +416,11 @@
       <artifactId>ambari-infra-solr-plugin</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-infra-manager</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 
 
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/control b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/control
new file mode 100644
index 0000000..03663a0
--- /dev/null
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/control
@@ -0,0 +1,22 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+Package: [[infra-manager.package.name]]
+Version: [[package-version]]-[[package-release]]
+Section: [[deb.section]]
+Priority: [[deb.priority]]
+Depends: [[deb.dependency.list]]
+Architecture: [[deb.architecture]]
+Description: [[description]]
+Maintainer: [[deb.publisher]]
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst
new file mode 100644
index 0000000..21a01fa
--- /dev/null
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst
@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postrm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postrm
new file mode 100644
index 0000000..21a01fa
--- /dev/null
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postrm
@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/preinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/preinst
new file mode 100644
index 0000000..21a01fa
--- /dev/null
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/preinst
@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/prerm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/prerm
new file mode 100644
index 0000000..21a01fa
--- /dev/null
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/prerm
@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
diff --git a/ambari-infra/ambari-infra-manager/README.md b/ambari-infra/ambari-infra-manager/README.md
new file mode 100644
index 0000000..d3527c4
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/README.md
@@ -0,0 +1,31 @@
+<!--
+{% comment %}
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+{% endcomment %}
+-->
+
+# Ambari Infra Manager
+TODO
+## Build & Run Application
+```bash
+mvn clean package exec:java
+```
+
+## Build & Run Application in docker container
+```bash
+cd docker
+./infra-manager-docker.sh
+```
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/build.xml b/ambari-infra/ambari-infra-manager/build.xml
new file mode 100644
index 0000000..3d0f4da
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/build.xml
@@ -0,0 +1,54 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project basedir="." default="build" name="infra-manager">
+  <property environment="env"/>
+  <property name="debuglevel" value="source,lines,vars"/>
+  <dirname property="builddir" file="build.xml"/>
+  <property name="target" value="1.7"/>
+  <property name="source" value="1.7"/>
+  <target name="init">
+  </target>
+  <target name="build"/>
+
+  <target name="package">
+    <delete dir="target/package"/>
+    <copy todir="target/package/libs" includeEmptyDirs="no">
+      <fileset dir="target/libs"/>
+    </copy>
+    <copy todir="target/package/libs" includeEmptyDirs="no">
+      <fileset file="target/*.jar"/>
+    </copy>
+    <copy todir="target/package" includeEmptyDirs="no">
+      <fileset file="src/main/resources/infraManager.sh"/>
+      <fileset file="src/main/resources/infra-manager-env.sh"/>
+      <fileset file="target/classes/infra-manager.properties"/>
+      <fileset file="target/classes/log4j.xml"/>
+    </copy>
+    <chmod file="target/package/*.sh" perm="755"/>
+    <tar compression="gzip" destfile="target/ambari-infra-manager.tar.gz">
+      <tarfileset mode="755" dir="target/package">
+        <include name="*.sh"/>
+      </tarfileset>
+      <tarfileset mode="664" dir="target/package">
+        <exclude name="*.sh"/>
+      </tarfileset>
+    </tar>
+
+  </target>
+
+</project>
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/docker/Dockerfile b/ambari-infra/ambari-infra-manager/docker/Dockerfile
new file mode 100644
index 0000000..adb584a
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/docker/Dockerfile
@@ -0,0 +1,52 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+FROM centos:centos6
+
+RUN echo root:changeme | chpasswd
+
+RUN yum clean all -y && yum update -y
+RUN yum -y install vim wget rpm-build sudo which telnet tar openssh-server openssh-clients ntp git httpd lsof
+RUN rpm -e --nodeps --justdb glibc-common
+RUN yum -y install glibc-common
+
+ENV HOME /root
+
+#Install JAVA
+ENV JAVA_VERSION 8u31
+ENV BUILD_VERSION b13
+RUN wget --no-cookies --no-check-certificate --header "Cookie: oraclelicense=accept-securebackup-cookie" "http://download.oracle.com/otn-pub/java/jdk/$JAVA_VERSION-$BUILD_VERSION/jdk-$JAVA_VERSION-linux-x64.rpm" -O jdk-8-linux-x64.rpm
+RUN rpm -ivh jdk-8-linux-x64.rpm
+ENV JAVA_HOME /usr/java/default/
+
+#Install Maven
+RUN mkdir -p /opt/maven
+WORKDIR /opt/maven
+RUN wget http://archive.apache.org/dist/maven/maven-3/3.3.1/binaries/apache-maven-3.3.1-bin.tar.gz
+RUN tar -xvzf /opt/maven/apache-maven-3.3.1-bin.tar.gz
+RUN rm -rf /opt/maven/apache-maven-3.3.1-bin.tar.gz
+
+ENV M2_HOME /opt/maven/apache-maven-3.3.1
+ENV MAVEN_OPTS -Xmx2048m
+ENV PATH $PATH:$JAVA_HOME/bin:$M2_HOME/bin
+
+# SSH key
+RUN ssh-keygen -f /root/.ssh/id_rsa -t rsa -N ''
+RUN cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys
+RUN chmod 600 /root/.ssh/authorized_keys
+RUN sed -ri 's/UsePAM yes/UsePAM no/g' /etc/ssh/sshd_config
+
+ADD bin/start.sh /root/start.sh
+RUN chmod +x /root/start.sh
+
+WORKDIR /root
+CMD /root/start.sh
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/docker/bin/start.sh b/ambari-infra/ambari-infra-manager/docker/bin/start.sh
new file mode 100755
index 0000000..076c06f
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/docker/bin/start.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+export INFRA_MANAGER_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=5007,server=y,suspend=n"
+touch /root/infra-manager.log
+/root/ambari-infra-manager/infraManager.sh --port 61890 > /root/infra-manager.log
+tail -f /root/infra-manager.log
+
diff --git a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh b/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh
new file mode 100755
index 0000000..87d6b8a
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh
@@ -0,0 +1,85 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+sdir="`dirname \"$0\"`"
+: ${1:?"argument is missing: (start|stop|build-and-run|build|build-docker-and-run|build-mvn-and-run|build-docker-only|build-mvn-only)"}
+command="$1"
+
+function build_infra_manager_container() {
+  pushd $sdir
+  docker build -t ambari-infra-manager:v1.0 .
+  popd
+}
+
+function build_infra_manager_project() {
+  pushd $sdir/../
+  mvn clean package -DskipTests
+  popd
+}
+
+function kill_infra_manager_container() {
+  echo "Try to remove infra manager container if exists ..."
+  docker rm -f infra-manager
+}
+
+function start_infra_manager_container() {
+ echo "Start infra manager container ..."
+ pushd $sdir/../
+ local AMBARI_INFRA_MANAGER_LOCATION=$(pwd)
+ popd
+ kill_infra_manager_container
+ docker run -d --name infra-manager --hostname infra-manager.apache.org \
+   -v $AMBARI_INFRA_MANAGER_LOCATION/target/package:/root/ambari-infra-manager -p 61890:61890 -p 5007:5007 \
+   ambari-infra-manager:v1.0
+  ip_address=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' logsearch)
+  echo "Ambari Infra Manager container started on $ip_address (for Mac OSX route to boot2docker/docker-machine VM address, e.g.: 'sudo route add -net 172.17.0.0/16 192.168.59.103')"
+  echo "You can follow Log Search logs with 'docker logs -f infra-manager' command"
+}
+
+case $command in
+  "build-and-run")
+     build_infra_manager_project
+     build_infra_manager_container
+     start_infra_manager_container
+     ;;
+  "build")
+     build_infra_manager_project
+     start_infra_manager_container
+     ;;
+  "build-docker-and-run")
+     build_infra_manager_container
+     start_infra_manager_container
+     ;;
+  "build-mvn-and-run")
+     build_infra_manager_project
+     build_infra_manager_container
+     ;;
+  "build-docker-only")
+     build_infra_manager_container
+     ;;
+  "build-mvn-only")
+     build_infra_manager_project
+     ;;
+  "start")
+     start_infra_manager_container
+     ;;
+  "stop")
+     kill_infra_manager_container
+     ;;
+   *)
+   echo "Available commands: (start|stop|build-and-run|build|build-docker-and-run|build-mvn-and-run|build-docker-only|build-mvn-only)"
+   ;;
+esac
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/pom.xml b/ambari-infra/ambari-infra-manager/pom.xml
new file mode 100644
index 0000000..b7708c2
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/pom.xml
@@ -0,0 +1,431 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <parent>
+    <artifactId>ambari-infra</artifactId>
+    <groupId>org.apache.ambari</groupId>
+    <version>2.0.0.0-SNAPSHOT</version>
+  </parent>
+  <name>Ambari Infra Manager</name>
+  <url>http://maven.apache.org</url>
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>ambari-infra-manager</artifactId>
+
+  <properties>
+    <spring.version>4.2.5.RELEASE</spring.version>
+    <spring.security.version>4.0.4.RELEASE</spring.security.version>
+    <jersey.version>2.23.2</jersey.version>
+    <jetty-version>9.2.11.v20150529</jetty-version>
+    <swagger.version>1.5.8</swagger.version>
+    <spring-data-solr.version>2.0.2.RELEASE</spring-data-solr.version>
+    <jjwt.version>0.6.0</jjwt.version>
+    <spring-batch.version>3.0.7.RELEASE</spring-batch.version>
+    <jdk.version>1.7</jdk.version>
+    <sqlite.version>3.8.11.2</sqlite.version>
+  </properties>
+
+  <build>
+    <finalName>ambari-infra-manager_${project.version}</finalName>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>3.0</version>
+        <configuration>
+          <source>${jdk.version}</source>
+          <target>${jdk.version}</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>exec-maven-plugin</artifactId>
+        <version>1.2.1</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>java</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <mainClass>org.apache.ambari.infra.InfraManager</mainClass>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.8</version>
+        <executions>
+          <execution>
+            <id>copy-dependencies</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputAbsoluteArtifactFilename>true</outputAbsoluteArtifactFilename>
+              <outputDirectory>${basedir}/target/libs</outputDirectory>
+              <overWriteReleases>false</overWriteReleases>
+              <overWriteSnapshots>false</overWriteSnapshots>
+              <overWriteIfNewer>true</overWriteIfNewer>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <version>1.7</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <configuration>
+              <target>
+                <ant antfile="build.xml">
+                  <target name="package"/>
+                </ant>
+              </target>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <version>3.4</version>
+      <scope>test</scope>
+    </dependency>
+    <!-- Spring dependencies -->
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-beans</artifactId>
+      <version>${spring.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-context</artifactId>
+      <version>${spring.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-test</artifactId>
+      <version>${spring.version}</version>
+    </dependency>
+    <!-- Spring Security -->
+    <dependency>
+      <groupId>org.springframework.security</groupId>
+      <artifactId>spring-security-web</artifactId>
+      <version>${spring.security.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.security</groupId>
+      <artifactId>spring-security-core</artifactId>
+      <version>${spring.security.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.security</groupId>
+      <artifactId>spring-security-config</artifactId>
+      <version>${spring.security.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.security</groupId>
+      <artifactId>spring-security-ldap</artifactId>
+      <version>${spring.security.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.ext</groupId>
+      <artifactId>jersey-spring3</artifactId>
+      <version>2.23.2</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.springframework</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.connectors</groupId>
+      <artifactId>jersey-apache-connector</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.core</groupId>
+      <artifactId>jersey-client</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.media</groupId>
+      <artifactId>jersey-media-json-jettison</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.media</groupId>
+      <artifactId>jersey-media-json-jackson</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.core</groupId>
+      <artifactId>jersey-common</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>javax.servlet-api</artifactId>
+      <version>3.1.0</version>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>1.2.17</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.solr</groupId>
+      <artifactId>solr-solrj</artifactId>
+      <version>${solr.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.solr</groupId>
+      <artifactId>solr-core</artifactId>
+      <version>${solr.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>*</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-core</artifactId>
+      <version>${solr.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-analyzers-common</artifactId>
+      <version>${solr.version}</version>
+    </dependency>
+    <!-- Hadoop -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>2.7.0</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-server</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>2.4</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+      <version>1.3.1</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.security.kerberos</groupId>
+      <artifactId>spring-security-kerberos-core</artifactId>
+      <version>1.0.1.RELEASE</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.security.kerberos</groupId>
+      <artifactId>spring-security-kerberos-web</artifactId>
+      <version>1.0.1.RELEASE</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.security.kerberos</groupId>
+      <artifactId>spring-security-kerberos-client</artifactId>
+      <version>1.0.1.RELEASE</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-security</artifactId>
+      <version>${jetty-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-server</artifactId>
+      <version>${jetty-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-servlet</artifactId>
+      <version>${jetty-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-servlets</artifactId>
+      <version>${jetty-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-util</artifactId>
+      <version>${jetty-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-webapp</artifactId>
+      <version>${jetty-version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.springframework</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-annotations</artifactId>
+      <version>${jetty-version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.springframework</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>cglib</groupId>
+      <artifactId>cglib</artifactId>
+      <version>3.2.4</version>
+    </dependency>
+    <dependency>
+      <groupId>io.swagger</groupId>
+      <artifactId>swagger-annotations</artifactId>
+      <version>${swagger.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.swagger</groupId>
+      <artifactId>swagger-core</artifactId>
+      <version>${swagger.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.swagger</groupId>
+      <artifactId>swagger-jersey2-jaxrs</artifactId>
+      <version>${swagger.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.ws.rs</groupId>
+          <artifactId>jsr311-api</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>io.swagger</groupId>
+      <artifactId>swagger-models</artifactId>
+      <version>${swagger.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.webjars</groupId>
+      <artifactId>swagger-ui</artifactId>
+      <version>2.1.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.data</groupId>
+      <artifactId>spring-data-solr</artifactId>
+      <version>${spring-data-solr.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-context-support</artifactId>
+      <version>${spring.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.batch</groupId>
+      <artifactId>spring-batch-core</artifactId>
+      <version>${spring-batch.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-jdbc</artifactId>
+      <version>${spring.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.jsonwebtoken</groupId>
+      <artifactId>jjwt</artifactId>
+      <version>${jjwt.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.ext</groupId>
+      <artifactId>jersey-bean-validation</artifactId>
+      <version>2.25</version>
+    </dependency>
+    <dependency>
+      <groupId>org.xerial</groupId>
+      <artifactId>sqlite-jdbc</artifactId>
+      <version>${sqlite.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.batch</groupId>
+      <artifactId>spring-batch-admin-manager</artifactId>
+      <version>1.3.1.RELEASE</version>
+    </dependency>
+  </dependencies>
+
+</project>
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
new file mode 100644
index 0000000..227bab4
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra;
+
+import org.apache.ambari.infra.conf.InfraManagerConfig;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.HandlerList;
+import org.eclipse.jetty.server.handler.ResourceHandler;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.resource.Resource;
+import org.eclipse.jetty.util.resource.ResourceCollection;
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.web.context.ContextLoaderListener;
+import org.springframework.web.context.request.RequestContextListener;
+import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
+
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import static org.apache.ambari.infra.common.InfraManagerConstants.DEFAULT_PORT;
+import static org.apache.ambari.infra.common.InfraManagerConstants.DEFAULT_PROTOCOL;
+import static org.apache.ambari.infra.common.InfraManagerConstants.INFRA_MANAGER_SESSION_ID;
+import static org.apache.ambari.infra.common.InfraManagerConstants.PROTOCOL_SSL;
+import static org.apache.ambari.infra.common.InfraManagerConstants.ROOT_CONTEXT;
+import static org.apache.ambari.infra.common.InfraManagerConstants.SESSION_TIMEOUT;
+import static org.apache.ambari.infra.common.InfraManagerConstants.WEB_RESOURCE_FOLDER;
+
+public class InfraManager {
+
+  private static final Logger LOG = LoggerFactory.getLogger(InfraManager.class);
+
+  public static void main(String[] args) {
+    Options options = new Options();
+    HelpFormatter helpFormatter = new HelpFormatter();
+    helpFormatter.setDescPadding(10);
+    helpFormatter.setWidth(200);
+
+    final Option helpOption = Option.builder("h")
+      .longOpt("help")
+      .desc("Print commands")
+      .build();
+
+    final Option portOption = Option.builder("p")
+      .longOpt("port")
+      .desc("Infra Manager port")
+      .numberOfArgs(1)
+      .argName("port_number")
+      .build();
+
+    final Option protocolOption = Option.builder("t")
+      .longOpt("tls-enabled")
+      .desc("TLS enabled for Infra Manager")
+      .build();
+
+    options.addOption(helpOption);
+    options.addOption(portOption);
+    options.addOption(protocolOption);
+
+    try {
+      CommandLineParser cmdLineParser = new DefaultParser();
+      CommandLine cli = cmdLineParser.parse(options, args);
+      int port = cli.hasOption('p') ? Integer.parseInt(cli.getOptionValue('p')) : DEFAULT_PORT;
+      String protocol = cli.hasOption("t") ? PROTOCOL_SSL : DEFAULT_PROTOCOL;
+
+      Server server = buildServer(port, protocol);
+      HandlerList handlers = new HandlerList();
+      handlers.addHandler(createSwaggerContext());
+      handlers.addHandler(createBaseWebappContext());
+
+      server.setHandler(handlers);
+      server.start();
+
+      LOG.debug("============================Server Dump=======================================");
+      LOG.debug(server.dump());
+      LOG.debug("==============================================================================");
+      server.join();
+    } catch (Exception e) {
+      // TODO
+      e.printStackTrace();
+    }
+  }
+
+  private static Server buildServer(int port, String protocol) {
+    Server server = new Server();
+    HttpConfiguration httpConfiguration = new HttpConfiguration();
+    httpConfiguration.setRequestHeaderSize(65535);
+    // TODO: tls
+    ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(httpConfiguration));
+    connector.setPort(port);
+    server.setConnectors(new Connector[]{connector});
+    URI infraManagerURI = URI.create(String.format("%s://0.0.0.0:%s", protocol, String.valueOf(port)));
+    LOG.info("Starting infra manager URI=" + infraManagerURI);
+    return server;
+  }
+
+  private static WebAppContext createBaseWebappContext() throws MalformedURLException {
+    URI webResourceBase = findWebResourceBase();
+    WebAppContext context = new WebAppContext();
+    context.setBaseResource(Resource.newResource(webResourceBase));
+    context.setContextPath(ROOT_CONTEXT);
+    context.setParentLoaderPriority(true);
+
+    // Configure Spring
+    context.addEventListener(new ContextLoaderListener());
+    context.addEventListener(new RequestContextListener());
+    // TODO: security, add: context.addFilter(new FilterHolder(new DelegatingFilterProxy("springSecurityFilterChain")), "/*", EnumSet.allOf(DispatcherType.class));
+    context.setInitParameter("contextClass", AnnotationConfigWebApplicationContext.class.getName());
+    context.setInitParameter("contextConfigLocation", InfraManagerConfig.class.getName());
+
+    // Configure Jersey
+    ServletHolder jerseyServlet = context.addServlet(org.glassfish.jersey.servlet.ServletContainer.class, "/api/v1/*");
+    jerseyServlet.setInitOrder(1);
+    jerseyServlet.setInitParameter("jersey.config.server.provider.packages","org.apache.ambari.infra.rest,io.swagger.jaxrs.listing");
+
+    context.getSessionHandler().getSessionManager().setMaxInactiveInterval(SESSION_TIMEOUT);
+    context.getSessionHandler().getSessionManager().getSessionCookieConfig().setName(INFRA_MANAGER_SESSION_ID);
+
+    return context;
+  }
+
+  private static URI findWebResourceBase() {
+    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(WEB_RESOURCE_FOLDER);
+    String errorMessage = "Web Resource Folder " + WEB_RESOURCE_FOLDER + " not found in classpath";
+    if (fileCompleteUrl != null) {
+      try {
+        return fileCompleteUrl.toURI().normalize();
+      } catch (URISyntaxException e) {
+        LOG.error(errorMessage, e);
+        System.exit(1);
+      }
+    } else {
+      LOG.error(errorMessage);
+      System.exit(1);
+    }
+    throw new IllegalStateException(errorMessage);
+  }
+
+  private static ServletContextHandler createSwaggerContext() throws URISyntaxException {
+    ResourceHandler resourceHandler = new ResourceHandler();
+    ResourceCollection resources = new ResourceCollection(new String[] {
+      InfraManager.class.getClassLoader()
+        .getResource("META-INF/resources/webjars/swagger-ui/2.1.0")
+        .toURI().toString(),
+      InfraManager.class.getClassLoader()
+        .getResource("swagger")
+        .toURI().toString()
+    });
+    resourceHandler.setBaseResource(resources);
+    resourceHandler.setWelcomeFiles(new String[]{"swagger.html"}); // rewrite index.html from swagger-ui webjar
+    ServletContextHandler context = new ServletContextHandler();
+    context.setContextPath("/docs/");
+    context.setHandler(resourceHandler);
+    return context;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/common/InfraManagerConstants.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/common/InfraManagerConstants.java
new file mode 100644
index 0000000..11714f3
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/common/InfraManagerConstants.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.common;
+
+public final class InfraManagerConstants {
+  public static final int DEFAULT_PORT = 61890;
+  public static final String DEFAULT_PROTOCOL = "http";
+  public static final String INFRA_MANAGER_SESSION_ID = "INFRA_MANAGER_SESSIONID";
+  public static final String PROTOCOL_SSL = "https";
+  public static final String ROOT_CONTEXT = "/";
+  public static final String WEB_RESOURCE_FOLDER = "webapp";
+  public static final Integer SESSION_TIMEOUT = 60 * 30;
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerApiDocConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerApiDocConfig.java
new file mode 100644
index 0000000..22e2263
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerApiDocConfig.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.conf;
+
+import io.swagger.jaxrs.config.BeanConfig;
+import io.swagger.jaxrs.listing.ApiListingResource;
+import io.swagger.jaxrs.listing.SwaggerSerializers;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class InfraManagerApiDocConfig {
+
+  @Bean
+  public ApiListingResource apiListingResource() {
+    return new ApiListingResource();
+  }
+
+  @Bean
+  public SwaggerSerializers swaggerSerializers() {
+    return new SwaggerSerializers();
+  }
+
+  @Bean
+  public BeanConfig swaggerConfig() {
+    BeanConfig beanConfig = new BeanConfig();
+    beanConfig.setSchemes(new String[]{"http", "https"});
+    beanConfig.setBasePath("/api/v1");
+    beanConfig.setTitle("Infra Manager REST API");
+    beanConfig.setDescription("Manager component for Ambari Infra");
+    beanConfig.setLicense("Apache 2.0");
+    beanConfig.setLicenseUrl("http://www.apache.org/licenses/LICENSE-2.0.html");
+    beanConfig.setScan(true);
+    beanConfig.setVersion("1.0.0");
+    beanConfig.setResourcePackage("org.apache.ambari.infra.rest");
+    return beanConfig;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java
new file mode 100644
index 0000000..86059a2
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.conf;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
+
+@Configuration
+@ComponentScan("org.apache.ambari.infra")
+@PropertySource(value = {"classpath:infra-manager.properties"})
+public class InfraManagerConfig {
+
+  @Bean
+  public static PropertySourcesPlaceholderConfigurer propertyConfigurer() {
+    return new PropertySourcesPlaceholderConfigurer();
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java
new file mode 100644
index 0000000..c3d8db6
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java
@@ -0,0 +1,282 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.conf.batch;
+
+import org.apache.ambari.infra.job.dummy.DummyItemProcessor;
+import org.apache.ambari.infra.job.dummy.DummyItemWriter;
+import org.apache.ambari.infra.job.dummy.DummyObject;
+import org.springframework.batch.admin.service.JdbcSearchableJobExecutionDao;
+import org.springframework.batch.admin.service.JdbcSearchableJobInstanceDao;
+import org.springframework.batch.admin.service.JdbcSearchableStepExecutionDao;
+import org.springframework.batch.admin.service.JobService;
+import org.springframework.batch.admin.service.SearchableJobExecutionDao;
+import org.springframework.batch.admin.service.SearchableJobInstanceDao;
+import org.springframework.batch.admin.service.SearchableStepExecutionDao;
+import org.springframework.batch.admin.service.SimpleJobService;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.configuration.JobRegistry;
+import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
+import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
+import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
+import org.springframework.batch.core.explore.JobExplorer;
+import org.springframework.batch.core.launch.JobLauncher;
+import org.springframework.batch.core.launch.JobOperator;
+import org.springframework.batch.core.launch.support.SimpleJobLauncher;
+import org.springframework.batch.core.launch.support.SimpleJobOperator;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.dao.DefaultExecutionContextSerializer;
+import org.springframework.batch.core.repository.dao.ExecutionContextDao;
+import org.springframework.batch.core.repository.dao.JdbcExecutionContextDao;
+import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
+import org.springframework.batch.item.ItemProcessor;
+import org.springframework.batch.item.ItemReader;
+import org.springframework.batch.item.ItemWriter;
+import org.springframework.batch.item.file.FlatFileItemReader;
+import org.springframework.batch.item.file.LineMapper;
+import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
+import org.springframework.batch.item.file.mapping.DefaultLineMapper;
+import org.springframework.batch.item.file.mapping.FieldSetMapper;
+import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
+import org.springframework.batch.item.file.transform.LineTokenizer;
+import org.springframework.batch.support.transaction.ResourcelessTransactionManager;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.core.io.Resource;
+import org.springframework.core.task.SimpleAsyncTaskExecutor;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.datasource.DriverManagerDataSource;
+import org.springframework.jdbc.datasource.init.DataSourceInitializer;
+import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import org.springframework.transaction.PlatformTransactionManager;
+
+import javax.inject.Inject;
+import javax.sql.DataSource;
+import java.net.MalformedURLException;
+
+@Configuration
+@EnableBatchProcessing
+@EnableScheduling
+public class InfraManagerBatchConfig {
+
+  @Value("classpath:org/springframework/batch/core/schema-drop-sqlite.sql")
+  private Resource dropRepositoryTables;
+
+  @Value("classpath:org/springframework/batch/core/schema-sqlite.sql")
+  private Resource dataRepositorySchema;
+
+  @Value("${infra-manager.batch.db.init:false}")
+  private boolean dropDatabaseOnStartup;
+
+  @Value("${infra-manager.batch.db.file:/etc/ambari-infra-manager/conf/repository.db}")
+  private String sqliteDbFileLocation;
+
+  @Value("${infra-manager.batch.db.username}")
+  private String databaseUsername;
+
+  @Value("${infra-manager.batch.db.password}")
+  private String databasePassword;
+
+  @Inject
+  private StepBuilderFactory steps;
+
+  @Inject
+  private JobBuilderFactory jobs;
+
+  @Inject
+  private JobRegistry jobRegistry;
+
+  @Inject
+  private JobExplorer jobExplorer;
+
+  @Bean
+  public DataSource dataSource() {
+    DriverManagerDataSource dataSource = new DriverManagerDataSource();
+    dataSource.setDriverClassName("org.sqlite.JDBC");
+    dataSource.setUrl("jdbc:sqlite:" + sqliteDbFileLocation);
+    dataSource.setUsername(databaseUsername);
+    dataSource.setPassword(databasePassword);
+    return dataSource;
+  }
+
+  @Bean
+  public DataSourceInitializer dataSourceInitializer(DataSource dataSource)
+    throws MalformedURLException {
+    ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator();
+    if (dropDatabaseOnStartup) {
+      databasePopulator.addScript(dropRepositoryTables);
+      databasePopulator.setIgnoreFailedDrops(true);
+    }
+    databasePopulator.addScript(dataRepositorySchema);
+    databasePopulator.setContinueOnError(true);
+
+    DataSourceInitializer initializer = new DataSourceInitializer();
+    initializer.setDataSource(dataSource);
+    initializer.setDatabasePopulator(databasePopulator);
+
+    return initializer;
+  }
+
+  @Bean
+  public JobRepository jobRepository() throws Exception {
+    JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
+    factory.setDataSource(dataSource());
+    factory.setTransactionManager(getTransactionManager());
+    factory.afterPropertiesSet();
+    return factory.getObject();
+  }
+
+  @Bean
+  public PlatformTransactionManager getTransactionManager() {
+    return new ResourcelessTransactionManager();
+  }
+
+  @Bean(name = "jobLauncher")
+  public JobLauncher jobLauncher() throws Exception {
+    SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
+    jobLauncher.setJobRepository(jobRepository());
+    jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor());
+    jobLauncher.afterPropertiesSet();
+    return jobLauncher;
+  }
+
+  @Bean
+  public JobOperator jobOperator() throws Exception {
+    SimpleJobOperator jobOperator = new SimpleJobOperator();
+    jobOperator.setJobExplorer(jobExplorer);
+    jobOperator.setJobLauncher(jobLauncher());
+    jobOperator.setJobRegistry(jobRegistry);
+    jobOperator.setJobRepository(jobRepository());
+    return jobOperator;
+  }
+
+  @Bean
+  public JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor() {
+    JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor = new JobRegistryBeanPostProcessor();
+    jobRegistryBeanPostProcessor.setJobRegistry(jobRegistry);
+    return jobRegistryBeanPostProcessor;
+  }
+
+  @Bean
+  public JdbcTemplate jdbcTemplate() {
+    return new JdbcTemplate(dataSource());
+  }
+
+  @Bean
+  public SearchableJobInstanceDao searchableJobInstanceDao() {
+    JdbcSearchableJobInstanceDao dao = new JdbcSearchableJobInstanceDao();
+    dao.setJdbcTemplate(jdbcTemplate());
+    return dao;
+  }
+
+  @Bean
+  public SearchableJobExecutionDao searchableJobExecutionDao() {
+    JdbcSearchableJobExecutionDao dao = new JdbcSearchableJobExecutionDao();
+    dao.setJdbcTemplate(jdbcTemplate());
+    dao.setDataSource(dataSource());
+    return dao;
+  }
+
+  @Bean
+  public SearchableStepExecutionDao searchableStepExecutionDao() {
+    JdbcSearchableStepExecutionDao dao = new JdbcSearchableStepExecutionDao();
+    dao.setDataSource(dataSource());
+    dao.setJdbcTemplate(jdbcTemplate());
+    return dao;
+  }
+
+  @Bean
+  public ExecutionContextDao executionContextDao() {
+    JdbcExecutionContextDao dao = new JdbcExecutionContextDao();
+    dao.setSerializer(new DefaultExecutionContextSerializer());
+    dao.setJdbcTemplate(jdbcTemplate());
+    return dao;
+  }
+
+  @Bean
+  public JobService jobService() throws Exception {
+    return new
+      SimpleJobService(searchableJobInstanceDao(), searchableJobExecutionDao(), searchableStepExecutionDao(),
+      jobRepository(), jobLauncher(), jobRegistry, executionContextDao());
+  }
+
+  @Bean(name = "dummyStep")
+  protected Step dummyStep(ItemReader<DummyObject> reader,
+                       ItemProcessor<DummyObject, String> processor,
+                       ItemWriter<String> writer) {
+    return steps.get("dummyStep").<DummyObject, String> chunk(2)
+      .reader(reader).processor(processor).writer(writer).build();
+  }
+
+  @Bean(name = "dummyJob")
+  public Job job(@Qualifier("dummyStep") Step dummyStep) {
+    return jobs.get("dummyJob").start(dummyStep).build();
+  }
+
+  @Bean
+  public ItemReader<DummyObject> dummyItemReader() {
+    FlatFileItemReader<DummyObject> csvFileReader = new FlatFileItemReader<>();
+    csvFileReader.setResource(new ClassPathResource("dummy/dummy.txt"));
+    csvFileReader.setLinesToSkip(1);
+    LineMapper<DummyObject> lineMapper = dummyLineMapper();
+    csvFileReader.setLineMapper(lineMapper);
+    return csvFileReader;
+  }
+
+  @Bean
+  public ItemProcessor<DummyObject, String> dummyItemProcessor() {
+    return new DummyItemProcessor();
+  }
+
+  @Bean
+  public ItemWriter<String> dummyItemWriter() {
+    return new DummyItemWriter();
+  }
+
+  private LineMapper<DummyObject> dummyLineMapper() {
+    DefaultLineMapper<DummyObject> lineMapper = new DefaultLineMapper<>();
+
+    LineTokenizer dummyTokenizer = dummyTokenizer();
+    lineMapper.setLineTokenizer(dummyTokenizer);
+
+    FieldSetMapper<DummyObject> dummyFieldSetMapper = dummyFieldSetMapper();
+    lineMapper.setFieldSetMapper(dummyFieldSetMapper);
+
+    return lineMapper;
+  }
+
+  private FieldSetMapper<DummyObject> dummyFieldSetMapper() {
+    BeanWrapperFieldSetMapper<DummyObject> studentInformationMapper = new BeanWrapperFieldSetMapper<>();
+    studentInformationMapper.setTargetType(DummyObject.class);
+    return studentInformationMapper;
+  }
+
+  private LineTokenizer dummyTokenizer() {
+    DelimitedLineTokenizer studentLineTokenizer = new DelimitedLineTokenizer();
+    studentLineTokenizer.setDelimiter(",");
+    studentLineTokenizer.setNames(new String[]{"f1", "f2"});
+    return studentLineTokenizer;
+  }
+
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
new file mode 100644
index 0000000..a124e4d
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.dummy;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.item.ItemProcessor;
+
+public class DummyItemProcessor implements ItemProcessor<DummyObject, String> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(DummyItemProcessor.class);
+
+  @Override
+  public String process(DummyObject input) throws Exception {
+    LOG.info("Dummy processing, f1: {}, f2: {}. wait 10 seconds", input.getF1(), input.getF2());
+    Thread.sleep(10000);
+    return String.format("%s, %s", input.getF1(), input.getF2());
+  }
+
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
new file mode 100644
index 0000000..f495795
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.dummy;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.item.ItemWriter;
+
+import java.util.List;
+
+public class DummyItemWriter implements ItemWriter<String> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(DummyItemWriter.class);
+
+  @Override
+  public void write(List<? extends String> values) throws Exception {
+    LOG.info("DummyItem writer called (values: {})... wait 1 seconds", values.toString());
+    Thread.sleep(1000);
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyObject.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyObject.java
new file mode 100644
index 0000000..ce087dd
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyObject.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.dummy;
+
+public class DummyObject {
+  private String f1;
+  private String f2;
+
+  public String getF1() {
+    return f1;
+  }
+
+  public void setF1(String f1) {
+    this.f1 = f1;
+  }
+
+  public String getF2() {
+    return f2;
+  }
+
+  public void setF2(String f2) {
+    this.f2 = f2;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
new file mode 100644
index 0000000..fc0a4f7
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
@@ -0,0 +1,274 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.manager;
+
+import com.google.common.collect.Lists;
+import org.apache.ambari.infra.model.ExecutionContextResponse;
+import org.apache.ambari.infra.model.JobDetailsResponse;
+import org.apache.ambari.infra.model.JobExecutionDetailsResponse;
+import org.apache.ambari.infra.model.JobExecutionInfoResponse;
+import org.apache.ambari.infra.model.JobInstanceDetailsResponse;
+import org.apache.ambari.infra.model.JobOperationParams;
+import org.apache.ambari.infra.model.StepExecutionContextResponse;
+import org.apache.ambari.infra.model.StepExecutionInfoResponse;
+import org.apache.ambari.infra.model.StepExecutionProgressResponse;
+import org.springframework.batch.admin.history.StepExecutionHistory;
+import org.springframework.batch.admin.service.JobService;
+import org.springframework.batch.admin.service.NoSuchStepExecutionException;
+import org.springframework.batch.admin.web.JobInfo;
+import org.springframework.batch.admin.web.StepExecutionProgress;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobInstance;
+import org.springframework.batch.core.JobParametersBuilder;
+import org.springframework.batch.core.JobParametersInvalidException;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.launch.JobExecutionNotRunningException;
+import org.springframework.batch.core.launch.JobInstanceAlreadyExistsException;
+import org.springframework.batch.core.launch.JobOperator;
+import org.springframework.batch.core.launch.NoSuchJobException;
+import org.springframework.batch.core.launch.NoSuchJobExecutionException;
+import org.springframework.batch.core.launch.NoSuchJobInstanceException;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
+import org.springframework.batch.core.repository.JobRestartException;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+
+@Named
+public class JobManager {
+
+  @Inject
+  private JobService jobService;
+
+  @Inject
+  private JobOperator jobOperator;
+
+  private TimeZone timeZone = TimeZone.getDefault();
+
+  public Set<String> getAllJobNames() {
+    return jobOperator.getJobNames();
+  }
+
+  /**
+   * Launch a new job instance (based on the job name) and apply customized parameters to it.
+   * Also add a new date parameter to make sure the job instance will be unique.
+   */
+  public JobExecutionInfoResponse launchJob(String jobName, String params)
+    throws JobParametersInvalidException, JobInstanceAlreadyExistsException, NoSuchJobException,
+    JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException {
+    // TODO: handle params
+    JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
+    jobParametersBuilder.addDate("date", new Date());
+    return new JobExecutionInfoResponse(jobService.launch(jobName, jobParametersBuilder.toJobParameters()), timeZone);
+  }
+
+  /**
+   * Get the ids of all running executions that are mapped to a specific job name.
+   */
+  public Set<Long> getExecutionIdsByJobName(String jobName) throws NoSuchJobException {
+    return jobOperator.getRunningExecutions(jobName);
+  }
+
+  /**
+   * Stop all running job executions and return the number of stopped jobs.
+   */
+  public Integer stopAllJobs() {
+    return jobService.stopAll();
+  }
+
+  /**
+   * Gather job execution details by job execution id.
+   */
+  public JobExecutionDetailsResponse getExectionInfo(Long jobExecutionId) throws NoSuchJobExecutionException {
+    JobExecution jobExecution = jobService.getJobExecution(jobExecutionId);
+    List<StepExecutionInfoResponse> stepExecutionInfos = new ArrayList<StepExecutionInfoResponse>();
+    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
+      stepExecutionInfos.add(new StepExecutionInfoResponse(stepExecution, timeZone));
+    }
+    Collections.sort(stepExecutionInfos, new Comparator<StepExecutionInfoResponse>() {
+      @Override
+      public int compare(StepExecutionInfoResponse o1, StepExecutionInfoResponse o2) {
+        return o1.getId().compareTo(o2.getId());
+      }
+    });
+    return new JobExecutionDetailsResponse(new JobExecutionInfoResponse(jobExecution, timeZone), stepExecutionInfos);
+  }
+
+  /**
+   * Stop or abandon a running job execution by job execution id.
+   */
+  public JobExecutionInfoResponse stopOrAbandonJobByExecutionId(Long jobExecutionId, JobOperationParams.JobStopOrAbandonOperationParam operation)
+    throws NoSuchJobExecutionException, JobExecutionNotRunningException, JobExecutionAlreadyRunningException {
+    JobExecution jobExecution;
+    if (JobOperationParams.JobStopOrAbandonOperationParam.STOP.equals(operation)) {
+      jobExecution = jobService.stop(jobExecutionId);
+    } else if (JobOperationParams.JobStopOrAbandonOperationParam.ABANDON.equals(operation)) {
+      jobExecution = jobService.abandon(jobExecutionId);
+    } else {
+      throw new UnsupportedOperationException("Unsupported operaration");
+    }
+    return new JobExecutionInfoResponse(jobExecution, timeZone);
+  }
+
+  /**
+   * Get execution context for a job execution instance. (context can be shipped between job executions)
+   */
+  public ExecutionContextResponse getExecutionContextByJobExecutionId(Long executionId) throws NoSuchJobExecutionException {
+    JobExecution jobExecution = jobService.getJobExecution(executionId);
+    Map<String, Object> executionMap = new HashMap<>();
+    for (Map.Entry<String, Object> entry : jobExecution.getExecutionContext().entrySet()) {
+      executionMap.put(entry.getKey(), entry.getValue());
+    }
+    return new ExecutionContextResponse(executionId, executionMap);
+  }
+
+  /**
+   * Restart a specific job instance with the same parameters. (only restart operation is supported here)
+   */
+  public JobExecutionInfoResponse restart(Long jobInstanceId, String jobName,
+                                          JobOperationParams.JobRestartOperationParam operation) throws NoSuchJobException, JobParametersInvalidException,
+    JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, NoSuchJobExecutionException {
+    if (JobOperationParams.JobRestartOperationParam.RESTART.equals(operation)) {
+      Collection<JobExecution> jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstanceId);
+      JobExecution jobExecution = jobExecutions.iterator().next();
+      Long jobExecutionId = jobExecution.getId();
+      return new JobExecutionInfoResponse(jobService.restart(jobExecutionId), timeZone);
+    } else {
+      throw new UnsupportedOperationException("Unsupported operation (try: RESTART)");
+    }
+  }
+
+  /**
+   * Get all job details. (paged)
+   */
+  public List<JobInfo> getAllJobs(int start, int pageSize) {
+    List<JobInfo> jobs = new ArrayList<>();
+    Collection<String> names = jobService.listJobs(start, pageSize);
+    for (String name : names) {
+      int count = 0;
+      try {
+        count = jobService.countJobExecutionsForJob(name);
+      }
+      catch (NoSuchJobException e) {
+        // shouldn't happen
+      }
+      boolean launchable = jobService.isLaunchable(name);
+      boolean incrementable = jobService.isIncrementable(name);
+      jobs.add(new JobInfo(name, count, null, launchable, incrementable));
+    }
+    return jobs;
+  }
+
+  /**
+   * Get all executions for a specific job instance.
+   */
+  public List<JobExecutionInfoResponse> getExecutionsForJobInstance(String jobName, Long jobInstanceId) throws NoSuchJobInstanceException, NoSuchJobException {
+    List<JobExecutionInfoResponse> result = Lists.newArrayList();
+    JobInstance jobInstance = jobService.getJobInstance(jobInstanceId);
+    Collection<JobExecution> jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstance.getInstanceId());
+    for (JobExecution jobExecution : jobExecutions) {
+      result.add(new JobExecutionInfoResponse(jobExecution, timeZone));
+    }
+    return result;
+  }
+
+  /**
+   * Get job details for a specific job. (paged)
+   */
+  public JobDetailsResponse getJobDetails(String jobName, int page, int size) throws NoSuchJobException {
+    List<JobInstanceDetailsResponse> jobInstanceResponses = Lists.newArrayList();
+    Collection<JobInstance> jobInstances = jobService.listJobInstances(jobName, page, size);
+
+    int count = jobService.countJobExecutionsForJob(jobName);
+    boolean launchable = jobService.isLaunchable(jobName);
+    boolean isIncrementable = jobService.isIncrementable(jobName);
+
+    for (JobInstance jobInstance: jobInstances) {
+      List<JobExecutionInfoResponse> executionInfos = Lists.newArrayList();
+      Collection<JobExecution> jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstance.getId());
+      if (jobExecutions != null) {
+        for (JobExecution jobExecution : jobExecutions) {
+          executionInfos.add(new JobExecutionInfoResponse(jobExecution, timeZone));
+        }
+      }
+      jobInstanceResponses.add(new JobInstanceDetailsResponse(jobInstance, executionInfos));
+    }
+    return new JobDetailsResponse(new JobInfo(jobName, count, launchable, isIncrementable), jobInstanceResponses);
+  }
+
+  /**
+   * Get step execution details based for job execution id and step execution id.
+   */
+  public StepExecutionInfoResponse getStepExecution(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
+    StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId);
+    return new StepExecutionInfoResponse(stepExecution, timeZone);
+  }
+
+  /**
+   * Get step execution context details. (execution context can be shipped between steps)
+   */
+  public StepExecutionContextResponse getStepExecutionContext(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
+    StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId);
+    Map<String, Object> executionMap = new HashMap<>();
+    for (Map.Entry<String, Object> entry : stepExecution.getExecutionContext().entrySet()) {
+      executionMap.put(entry.getKey(), entry.getValue());
+    }
+    return new StepExecutionContextResponse(executionMap, jobExecutionId, stepExecutionId, stepExecution.getStepName());
+  }
+
+  /**
+   * Get step execution progress status details.
+   */
+  public StepExecutionProgressResponse getStepExecutionProgress(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
+    StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId);
+    StepExecutionInfoResponse stepExecutionInfoResponse = new StepExecutionInfoResponse(stepExecution, timeZone);
+    String stepName = stepExecution.getStepName();
+    if (stepName.contains(":partition")) {
+      stepName = stepName.replaceAll("(:partition).*", "$1*");
+    }
+    String jobName = stepExecution.getJobExecution().getJobInstance().getJobName();
+    StepExecutionHistory stepExecutionHistory = computeHistory(jobName, stepName);
+    StepExecutionProgress stepExecutionProgress = new StepExecutionProgress(stepExecution, stepExecutionHistory);
+
+    return new StepExecutionProgressResponse(stepExecutionProgress, stepExecutionHistory, stepExecutionInfoResponse);
+
+  }
+
+  private StepExecutionHistory computeHistory(String jobName, String stepName) {
+    int total = jobService.countStepExecutionsForStep(jobName, stepName);
+    StepExecutionHistory stepExecutionHistory = new StepExecutionHistory(stepName);
+    for (int i = 0; i < total; i += 1000) {
+      for (StepExecution stepExecution : jobService.listStepExecutionsForStep(jobName, stepName, i, 1000)) {
+        stepExecutionHistory.append(stepExecution);
+      }
+    }
+    return stepExecutionHistory;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java
new file mode 100644
index 0000000..2d46c54
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import java.util.Map;
+
+public class ExecutionContextResponse {
+
+  private final Long jobExecutionId;
+  private final Map<String, Object> executionContextMap;
+
+  public ExecutionContextResponse(Long jobExecutionId, Map<String, Object> executionContextMap) {
+    this.jobExecutionId = jobExecutionId;
+    this.executionContextMap = executionContextMap;
+  }
+
+  public Long getJobExecutionId() {
+    return jobExecutionId;
+  }
+
+  public Map<String, Object> getExecutionContextMap() {
+    return executionContextMap;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java
new file mode 100644
index 0000000..cd34fef
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import org.springframework.batch.admin.web.JobInfo;
+
+import java.util.List;
+
+public class JobDetailsResponse {
+
+  private JobInfo jobInfo;
+  private List<JobInstanceDetailsResponse> jobInstanceDetailsResponseList;
+
+  public JobDetailsResponse() {
+  }
+
+  public JobDetailsResponse(JobInfo jobInfo, List<JobInstanceDetailsResponse> jobInstanceDetailsResponseList) {
+    this.jobInfo = jobInfo;
+    this.jobInstanceDetailsResponseList = jobInstanceDetailsResponseList;
+  }
+
+  public JobInfo getJobInfo() {
+    return jobInfo;
+  }
+
+  public void setJobInfo(JobInfo jobInfo) {
+    this.jobInfo = jobInfo;
+  }
+
+  public List<JobInstanceDetailsResponse> getJobInstanceDetailsResponseList() {
+    return jobInstanceDetailsResponseList;
+  }
+
+  public void setJobInstanceDetailsResponseList(List<JobInstanceDetailsResponse> jobInstanceDetailsResponseList) {
+    this.jobInstanceDetailsResponseList = jobInstanceDetailsResponseList;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java
new file mode 100644
index 0000000..695b57f
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import java.util.List;
+
+public class JobExecutionDetailsResponse {
+
+  private JobExecutionInfoResponse jobExecutionInfoResponse;
+
+  private List<StepExecutionInfoResponse> stepExecutionInfoList;
+
+  public JobExecutionDetailsResponse(JobExecutionInfoResponse jobExecutionInfoResponse, List<StepExecutionInfoResponse> stepExecutionInfoList) {
+    this.jobExecutionInfoResponse = jobExecutionInfoResponse;
+    this.stepExecutionInfoList = stepExecutionInfoList;
+  }
+
+  public JobExecutionInfoResponse getJobExecutionInfoResponse() {
+    return jobExecutionInfoResponse;
+  }
+
+  public void setJobExecutionInfoResponse(JobExecutionInfoResponse jobExecutionInfoResponse) {
+    this.jobExecutionInfoResponse = jobExecutionInfoResponse;
+  }
+
+  public List<StepExecutionInfoResponse> getStepExecutionInfoList() {
+    return stepExecutionInfoList;
+  }
+
+  public void setStepExecutionInfoList(List<StepExecutionInfoResponse> stepExecutionInfoList) {
+    this.stepExecutionInfoList = stepExecutionInfoList;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java
new file mode 100644
index 0000000..a7e4a4f
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import org.apache.ambari.infra.model.wrapper.JobExecutionData;
+import org.springframework.batch.admin.web.JobParametersExtractor;
+import org.springframework.batch.core.BatchStatus;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobInstance;
+import org.springframework.batch.core.converter.DefaultJobParametersConverter;
+import org.springframework.batch.core.converter.JobParametersConverter;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Properties;
+import java.util.TimeZone;
+
+public class JobExecutionInfoResponse {
+  private Long id;
+  private int stepExecutionCount;
+  private Long jobId;
+  private String jobName;
+  private String startDate = "";
+  private String startTime = "";
+  private String duration = "";
+  private JobExecutionData jobExecutionData;
+  private Properties jobParameters;
+  private String jobParametersString;
+  private boolean restartable = false;
+  private boolean abandonable = false;
+  private boolean stoppable = false;
+  private final TimeZone timeZone;
+
+
+  public JobExecutionInfoResponse(JobExecution jobExecution, TimeZone timeZone) {
+    JobParametersConverter converter = new DefaultJobParametersConverter();
+    this.jobExecutionData = new JobExecutionData(jobExecution);
+    this.timeZone = timeZone;
+    this.id = jobExecutionData.getId();
+    this.jobId = jobExecutionData.getJobId();
+    this.stepExecutionCount = jobExecutionData.getStepExecutions().size();
+    this.jobParameters = converter.getProperties(jobExecutionData.getJobParameters());
+    this.jobParametersString = (new JobParametersExtractor()).fromJobParameters(jobExecutionData.getJobParameters());
+    JobInstance jobInstance = jobExecutionData.getJobInstance();
+    if(jobInstance != null) {
+      this.jobName = jobInstance.getJobName();
+      BatchStatus endTime = jobExecutionData.getStatus();
+      this.restartable = endTime.isGreaterThan(BatchStatus.STOPPING) && endTime.isLessThan(BatchStatus.ABANDONED);
+      this.abandonable = endTime.isGreaterThan(BatchStatus.STARTED) && endTime != BatchStatus.ABANDONED;
+      this.stoppable = endTime.isLessThan(BatchStatus.STOPPING);
+    } else {
+      this.jobName = "?";
+    }
+
+    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+    SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss");
+    SimpleDateFormat durationFormat = new SimpleDateFormat("HH:mm:ss");
+
+    durationFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
+    timeFormat.setTimeZone(timeZone);
+    dateFormat.setTimeZone(timeZone);
+    if(jobExecutionData.getStartTime() != null) {
+      this.startDate = dateFormat.format(jobExecutionData.getStartTime());
+      this.startTime = timeFormat.format(jobExecutionData.getStartTime());
+      Date endTime1 = jobExecutionData.getEndTime() != null? jobExecutionData.getEndTime():new Date();
+      this.duration = durationFormat.format(new Date(endTime1.getTime() - jobExecutionData.getStartTime().getTime()));
+    }
+  }
+
+  public Long getId() {
+    return id;
+  }
+
+  public int getStepExecutionCount() {
+    return stepExecutionCount;
+  }
+
+  public Long getJobId() {
+    return jobId;
+  }
+
+  public String getJobName() {
+    return jobName;
+  }
+
+  public String getStartDate() {
+    return startDate;
+  }
+
+  public String getStartTime() {
+    return startTime;
+  }
+
+  public String getDuration() {
+    return duration;
+  }
+
+  public JobExecutionData getJobExecutionData() {
+    return jobExecutionData;
+  }
+
+  public Properties getJobParameters() {
+    return jobParameters;
+  }
+
+  public String getJobParametersString() {
+    return jobParametersString;
+  }
+
+  public boolean isRestartable() {
+    return restartable;
+  }
+
+  public boolean isAbandonable() {
+    return abandonable;
+  }
+
+  public boolean isStoppable() {
+    return stoppable;
+  }
+
+  public TimeZone getTimeZone() {
+    return timeZone;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRequest.java
new file mode 100644
index 0000000..b4c20e9
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRequest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import javax.ws.rs.PathParam;
+
+public class JobExecutionRequest {
+
+  @PathParam("jobName")
+  private String jobName;
+
+  @PathParam("jobInstanceId")
+  private Long jobInstanceId;
+
+  public String getJobName() {
+    return jobName;
+  }
+
+  public Long getJobInstanceId() {
+    return jobInstanceId;
+  }
+
+  public void setJobName(String jobName) {
+    this.jobName = jobName;
+  }
+
+  public void setJobInstanceId(Long jobInstanceId) {
+    this.jobInstanceId = jobInstanceId;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java
new file mode 100644
index 0000000..88687e7
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+public class JobExecutionRestartRequest {
+
+  private String jobName;
+
+  private Long jobInstanceId;
+
+  private JobOperationParams.JobRestartOperationParam operation;
+
+  public String getJobName() {
+    return jobName;
+  }
+
+  public void setJobName(String jobName) {
+    this.jobName = jobName;
+  }
+
+  public Long getJobInstanceId() {
+    return jobInstanceId;
+  }
+
+  public void setJobInstanceId(Long jobInstanceId) {
+    this.jobInstanceId = jobInstanceId;
+  }
+
+  public JobOperationParams.JobRestartOperationParam getOperation() {
+    return operation;
+  }
+
+  public void setOperation(JobOperationParams.JobRestartOperationParam operation) {
+    this.operation = operation;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java
new file mode 100644
index 0000000..b176f12
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import javax.validation.constraints.NotNull;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
+
+public class JobExecutionStopRequest {
+
+  @PathParam("jobExecutionId")
+  @NotNull
+  private Long jobExecutionId;
+
+  @QueryParam("operation")
+  @NotNull
+  private JobOperationParams.JobStopOrAbandonOperationParam operation;
+
+  public Long getJobExecutionId() {
+    return jobExecutionId;
+  }
+
+  public void setJobExecutionId(Long jobExecutionId) {
+    this.jobExecutionId = jobExecutionId;
+  }
+
+  public JobOperationParams.JobStopOrAbandonOperationParam getOperation() {
+    return operation;
+  }
+
+  public void setOperation(JobOperationParams.JobStopOrAbandonOperationParam operation) {
+    this.operation = operation;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java
new file mode 100644
index 0000000..af88654
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import org.springframework.batch.core.JobInstance;
+
+import java.util.List;
+
+public class JobInstanceDetailsResponse {
+
+  private JobInstance jobInstance;
+
+  private List<JobExecutionInfoResponse> jobExecutionInfoResponseList;
+
+  public JobInstanceDetailsResponse() {
+  }
+
+  public JobInstanceDetailsResponse(JobInstance jobInstance, List<JobExecutionInfoResponse> jobExecutionInfoResponseList) {
+    this.jobInstance = jobInstance;
+    this.jobExecutionInfoResponseList = jobExecutionInfoResponseList;
+  }
+
+  public JobInstance getJobInstance() {
+    return jobInstance;
+  }
+
+  public void setJobInstance(JobInstance jobInstance) {
+    this.jobInstance = jobInstance;
+  }
+
+  public List<JobExecutionInfoResponse> getJobExecutionInfoResponseList() {
+    return jobExecutionInfoResponseList;
+  }
+
+  public void setJobExecutionInfoResponseList(List<JobExecutionInfoResponse> jobExecutionInfoResponseList) {
+    this.jobExecutionInfoResponseList = jobExecutionInfoResponseList;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java
new file mode 100644
index 0000000..905a4fa
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import javax.validation.constraints.NotNull;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
+
+public class JobInstanceStartRequest {
+
+  @PathParam("jobName")
+  @NotNull
+  private String jobName;
+
+  @QueryParam("params")
+  private String params;
+
+  public String getJobName() {
+    return jobName;
+  }
+
+  public void setJobName(String jobName) {
+    this.jobName = jobName;
+  }
+
+  public String getParams() {
+    return params;
+  }
+
+  public void setParams(String params) {
+    this.params = params;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobOperationParams.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobOperationParams.java
new file mode 100644
index 0000000..e286deb
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobOperationParams.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+public class JobOperationParams {
+
+  public enum JobStopOrAbandonOperationParam {
+    STOP, ABANDON;
+  }
+
+  public enum JobRestartOperationParam {
+    RESTART;
+  }
+
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobRequest.java
new file mode 100644
index 0000000..b4fd478
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobRequest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import javax.validation.constraints.NotNull;
+import javax.ws.rs.PathParam;
+
+public class JobRequest extends PageRequest {
+
+  @NotNull
+  @PathParam("jobName")
+  private String jobName;
+
+  public String getJobName() {
+    return jobName;
+  }
+
+  public void setJobName(String jobName) {
+    this.jobName = jobName;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/PageRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/PageRequest.java
new file mode 100644
index 0000000..679d4fd
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/PageRequest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.QueryParam;
+
+public class PageRequest {
+
+  @QueryParam("page")
+  @DefaultValue("0")
+  private int page;
+
+  @QueryParam("size")
+  @DefaultValue("20")
+  private int size;
+
+  public int getPage() {
+    return page;
+  }
+
+  public void setPage(int page) {
+    this.page = page;
+  }
+
+  public int getSize() {
+    return size;
+  }
+
+  public void setSize(int size) {
+    this.size = size;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java
new file mode 100644
index 0000000..0e67a87
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import java.util.Map;
+
+public class StepExecutionContextResponse {
+
+  private Map<String, Object> executionContextMap;
+
+  private Long jobExecutionId;
+
+  private Long stepExecutionId;
+
+  private String stepName;
+
+  public StepExecutionContextResponse() {
+  }
+
+  public StepExecutionContextResponse(Map<String, Object> executionContextMap, Long jobExecutionId, Long stepExecutionId, String stepName) {
+    this.executionContextMap = executionContextMap;
+    this.jobExecutionId = jobExecutionId;
+    this.stepExecutionId = stepExecutionId;
+    this.stepName = stepName;
+  }
+
+  public Map<String, Object> getExecutionContextMap() {
+    return executionContextMap;
+  }
+
+  public Long getJobExecutionId() {
+    return jobExecutionId;
+  }
+
+  public Long getStepExecutionId() {
+    return stepExecutionId;
+  }
+
+  public String getStepName() {
+    return stepName;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java
new file mode 100644
index 0000000..ed04767
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import org.apache.ambari.infra.model.wrapper.StepExecutionData;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.StepExecution;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.TimeZone;
+
+public class StepExecutionInfoResponse {
+  private Long id;
+  private Long jobExecutionId;
+  private String jobName;
+  private String name;
+  private String startDate = "-";
+  private String startTime = "-";
+  private String duration = "-";
+  private StepExecutionData stepExecutionData;
+  private long durationMillis;
+
+  public StepExecutionInfoResponse(String jobName, Long jobExecutionId, String name, TimeZone timeZone) {
+    this.jobName = jobName;
+    this.jobExecutionId = jobExecutionId;
+    this.name = name;
+    this.stepExecutionData = new StepExecutionData(new StepExecution(name, new JobExecution(jobExecutionId)));
+  }
+
+  public StepExecutionInfoResponse(StepExecution stepExecution, TimeZone timeZone) {
+    this.stepExecutionData = new StepExecutionData(stepExecution);
+    this.id = stepExecutionData.getId();
+    this.name = stepExecutionData.getStepName();
+    this.jobName = stepExecutionData.getJobExecution() != null && stepExecutionData.getJobExecution().getJobInstance() != null? stepExecutionData.getJobExecution().getJobInstance().getJobName():"?";
+    this.jobExecutionId = stepExecutionData.getJobExecutionId();
+    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+    SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss");
+    SimpleDateFormat durationFormat = new SimpleDateFormat("HH:mm:ss");
+
+    durationFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
+    timeFormat.setTimeZone(timeZone);
+    dateFormat.setTimeZone(timeZone);
+    if(stepExecutionData.getStartTime() != null) {
+      this.startDate = dateFormat.format(stepExecutionData.getStartTime());
+      this.startTime = timeFormat.format(stepExecutionData.getStartTime());
+      Date endTime = stepExecutionData.getEndTime() != null? stepExecutionData.getEndTime():new Date();
+      this.durationMillis = endTime.getTime() - stepExecutionData.getStartTime().getTime();
+      this.duration = durationFormat.format(new Date(this.durationMillis));
+    }
+
+  }
+
+  public Long getId() {
+    return this.id;
+  }
+
+  public Long getJobExecutionId() {
+    return this.jobExecutionId;
+  }
+
+  public String getName() {
+    return this.name;
+  }
+
+  public String getJobName() {
+    return this.jobName;
+  }
+
+  public String getStartDate() {
+    return this.startDate;
+  }
+
+  public String getStartTime() {
+    return this.startTime;
+  }
+
+  public String getDuration() {
+    return this.duration;
+  }
+
+  public long getDurationMillis() {
+    return this.durationMillis;
+  }
+
+  public String getStatus() {
+    return this.id != null?this.stepExecutionData.getStatus().toString():"NONE";
+  }
+
+  public String getExitCode() {
+    return this.id != null?this.stepExecutionData.getExitStatus().getExitCode():"NONE";
+  }
+
+  @JsonIgnore
+  public StepExecutionData getStepExecution() {
+    return this.stepExecutionData;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java
new file mode 100644
index 0000000..26f9ed4
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import org.springframework.batch.admin.history.StepExecutionHistory;
+import org.springframework.batch.admin.web.StepExecutionProgress;
+
+public class StepExecutionProgressResponse {
+
+  private StepExecutionProgress stepExecutionProgress;
+
+  private StepExecutionHistory stepExecutionHistory;
+
+  private StepExecutionInfoResponse stepExecutionInfoResponse;
+
+  public StepExecutionProgressResponse() {
+  }
+
+  public StepExecutionProgressResponse(StepExecutionProgress stepExecutionProgress, StepExecutionHistory stepExecutionHistory,
+                                       StepExecutionInfoResponse stepExecutionInfoResponse) {
+    this.stepExecutionProgress = stepExecutionProgress;
+    this.stepExecutionHistory = stepExecutionHistory;
+    this.stepExecutionInfoResponse = stepExecutionInfoResponse;
+  }
+
+  public StepExecutionProgress getStepExecutionProgress() {
+    return stepExecutionProgress;
+  }
+
+  public StepExecutionHistory getStepExecutionHistory() {
+    return stepExecutionHistory;
+  }
+
+  public StepExecutionInfoResponse getStepExecutionInfoResponse() {
+    return stepExecutionInfoResponse;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionRequest.java
new file mode 100644
index 0000000..2228171
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionRequest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import javax.validation.constraints.NotNull;
+import javax.ws.rs.PathParam;
+
+public class StepExecutionRequest {
+
+  @PathParam("jobExecutionId")
+  @NotNull
+  private Long jobExecutionId;
+
+  @PathParam("stepExecutionId")
+  @NotNull
+  private Long stepExecutionId;
+
+  public Long getJobExecutionId() {
+    return jobExecutionId;
+  }
+
+  public void setJobExecutionId(Long jobExecutionId) {
+    this.jobExecutionId = jobExecutionId;
+  }
+
+  public Long getStepExecutionId() {
+    return stepExecutionId;
+  }
+
+  public void setStepExecutionId(Long stepExecutionId) {
+    this.stepExecutionId = stepExecutionId;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java
new file mode 100644
index 0000000..28e262a
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model.wrapper;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.google.common.collect.Lists;
+import org.springframework.batch.core.BatchStatus;
+import org.springframework.batch.core.ExitStatus;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobInstance;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.item.ExecutionContext;
+
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Wrapper for #{{@link JobExecution}}
+ */
+public class JobExecutionData {
+
+  private JobExecution jobExecution;
+
+  public JobExecutionData(JobExecution jobExecution) {
+    this.jobExecution = jobExecution;
+  }
+
+  @JsonIgnore
+  public JobExecution getJobExecution() {
+    return jobExecution;
+  }
+
+  @JsonIgnore
+  public Collection<StepExecution> getStepExecutions() {
+    return jobExecution.getStepExecutions();
+  }
+
+  public JobParameters getJobParameters() {
+    return jobExecution.getJobParameters();
+  }
+
+  public JobInstance getJobInstance() {
+    return jobExecution.getJobInstance();
+  }
+
+  public Collection<StepExecutionData> getStepExecutionDataList() {
+    List<StepExecutionData> stepExecutionDataList = Lists.newArrayList();
+    Collection<StepExecution> stepExecutions = getStepExecutions();
+    if (stepExecutions != null) {
+      for (StepExecution stepExecution : stepExecutions) {
+        stepExecutionDataList.add(new StepExecutionData(stepExecution));
+      }
+    }
+    return stepExecutionDataList;
+  }
+
+  public BatchStatus getStatus() {
+    return jobExecution.getStatus();
+  }
+
+  public Date getStartTime() {
+    return jobExecution.getStartTime();
+  }
+
+  public Date getCreateTime() {
+    return jobExecution.getCreateTime();
+  }
+
+  public Date getEndTime() {
+    return jobExecution.getEndTime();
+  }
+
+  public Date getLastUpdated() {
+    return jobExecution.getLastUpdated();
+  }
+
+  public ExitStatus getExitStatus() {
+    return jobExecution.getExitStatus();
+  }
+
+  public ExecutionContext getExecutionContext() {
+    return jobExecution.getExecutionContext();
+  }
+
+  public List<Throwable> getFailureExceptions() {
+    return jobExecution.getFailureExceptions();
+  }
+
+  public String getJobConfigurationName() {
+    return jobExecution.getJobConfigurationName();
+  }
+
+  public Long getId() {
+    return jobExecution.getId();
+  }
+
+  public Long getJobId() {
+    return jobExecution.getJobId();
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java
new file mode 100644
index 0000000..26552ae
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model.wrapper;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import org.springframework.batch.core.BatchStatus;
+import org.springframework.batch.core.ExitStatus;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.item.ExecutionContext;
+
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Wrapper for #{{@link StepExecution}}
+ */
+public class StepExecutionData {
+
+  @JsonIgnore
+  private final JobExecution jobExecution;
+
+  @JsonIgnore
+  private final StepExecution stepExecution;
+
+
+  public StepExecutionData(StepExecution stepExecution) {
+    this.stepExecution = stepExecution;
+    this.jobExecution = stepExecution.getJobExecution();
+  }
+
+  @JsonIgnore
+  public JobExecution getJobExecution() {
+    return jobExecution;
+  }
+
+  @JsonIgnore
+  public StepExecution getStepExecution() {
+    return stepExecution;
+  }
+
+  public String getStepName() {
+    return stepExecution.getStepName();
+  }
+
+  public int getReadCount() {
+    return stepExecution.getReadCount();
+  }
+
+  public BatchStatus getStatus() {
+    return stepExecution.getStatus();
+  }
+
+  public int getWriteCount() {
+    return stepExecution.getWriteCount();
+  }
+
+  public int getCommitCount() {
+    return stepExecution.getCommitCount();
+  }
+
+  public int getRollbackCount() {
+    return stepExecution.getRollbackCount();
+  }
+
+  public int getReadSkipCount() {
+    return stepExecution.getReadSkipCount();
+  }
+
+  public int getProcessSkipCount() {
+    return stepExecution.getProcessSkipCount();
+  }
+
+  public Date getStartTime() {
+    return stepExecution.getStartTime();
+  }
+
+  public int getWriteSkipCount() {
+    return stepExecution.getWriteSkipCount();
+  }
+
+  public Date getEndTime() {
+    return stepExecution.getEndTime();
+  }
+
+  public Date getLastUpdated() {
+    return stepExecution.getLastUpdated();
+  }
+
+  public ExecutionContext getExecutionContext() {
+    return stepExecution.getExecutionContext();
+  }
+
+  public ExitStatus getExitStatus() {
+    return stepExecution.getExitStatus();
+  }
+
+  public boolean isTerminateOnly() {
+    return stepExecution.isTerminateOnly();
+  }
+
+  public int getFilterCount() {
+    return stepExecution.getFilterCount();
+  }
+
+  public List<Throwable> getFailureExceptions() {
+    return stepExecution.getFailureExceptions();
+  }
+
+  public Long getId() {
+    return stepExecution.getId();
+  }
+
+  public Long getJobExecutionId() {
+    return stepExecution.getJobExecutionId();
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
new file mode 100644
index 0000000..079cce3
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.rest;
+
+
+import com.google.common.collect.Maps;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.admin.service.NoSuchStepExecutionException;
+import org.springframework.batch.core.JobParametersInvalidException;
+import org.springframework.batch.core.launch.JobExecutionNotFailedException;
+import org.springframework.batch.core.launch.JobExecutionNotRunningException;
+import org.springframework.batch.core.launch.JobExecutionNotStoppedException;
+import org.springframework.batch.core.launch.JobInstanceAlreadyExistsException;
+import org.springframework.batch.core.launch.JobParametersNotFoundException;
+import org.springframework.batch.core.launch.NoSuchJobException;
+import org.springframework.batch.core.launch.NoSuchJobExecutionException;
+import org.springframework.batch.core.launch.NoSuchJobInstanceException;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
+import org.springframework.batch.core.repository.JobRestartException;
+import org.springframework.batch.core.step.NoSuchStepException;
+import org.springframework.web.bind.MethodArgumentNotValidException;
+
+import javax.batch.operations.JobExecutionAlreadyCompleteException;
+import javax.inject.Named;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+import java.util.Map;
+
+@Named
+@Provider
+public class JobExceptionMapper implements ExceptionMapper<Throwable> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(JobExceptionMapper.class);
+
+  private static final Map<Class, Response.Status> exceptionStatusCodeMap = Maps.newHashMap();
+
+  static {
+    exceptionStatusCodeMap.put(MethodArgumentNotValidException.class, Response.Status.BAD_REQUEST);
+    exceptionStatusCodeMap.put(NoSuchJobException.class, Response.Status.NOT_FOUND);
+    exceptionStatusCodeMap.put(NoSuchStepException.class, Response.Status.NOT_FOUND);
+    exceptionStatusCodeMap.put(NoSuchStepExecutionException.class, Response.Status.NOT_FOUND);
+    exceptionStatusCodeMap.put(NoSuchJobExecutionException.class, Response.Status.NOT_FOUND);
+    exceptionStatusCodeMap.put(NoSuchJobInstanceException.class, Response.Status.NOT_FOUND);
+    exceptionStatusCodeMap.put(JobExecutionNotRunningException.class, Response.Status.INTERNAL_SERVER_ERROR);
+    exceptionStatusCodeMap.put(JobExecutionNotStoppedException.class, Response.Status.INTERNAL_SERVER_ERROR);
+    exceptionStatusCodeMap.put(JobInstanceAlreadyExistsException.class, Response.Status.ACCEPTED);
+    exceptionStatusCodeMap.put(JobInstanceAlreadyCompleteException.class, Response.Status.ACCEPTED);
+    exceptionStatusCodeMap.put(JobExecutionAlreadyRunningException.class, Response.Status.ACCEPTED);
+    exceptionStatusCodeMap.put(JobExecutionAlreadyCompleteException.class, Response.Status.ACCEPTED);
+    exceptionStatusCodeMap.put(JobParametersNotFoundException.class, Response.Status.NOT_FOUND);
+    exceptionStatusCodeMap.put(JobExecutionNotFailedException.class, Response.Status.INTERNAL_SERVER_ERROR);
+    exceptionStatusCodeMap.put(JobRestartException.class, Response.Status.INTERNAL_SERVER_ERROR);
+    exceptionStatusCodeMap.put(JobParametersInvalidException.class, Response.Status.BAD_REQUEST);
+  }
+
+  @Override
+  public Response toResponse(Throwable throwable) {
+    LOG.error("REST Exception occurred:", throwable);
+    Response.Status status = Response.Status.INTERNAL_SERVER_ERROR;
+
+    for (Map.Entry<Class, Response.Status> entry : exceptionStatusCodeMap.entrySet()) {
+      if (throwable.getClass().isAssignableFrom(entry.getKey())) {
+        status = entry.getValue();
+        LOG.info("Exception mapped to: {} with status code: {}", entry.getKey().getCanonicalName(), entry.getValue().getStatusCode());
+        break;
+      }
+    }
+
+    return Response.status(status).entity(new StatusMessage(throwable.getMessage(), status.getStatusCode()))
+      .type(MediaType.APPLICATION_JSON_TYPE).build();
+  }
+
+  private class StatusMessage {
+    private String message;
+    private int statusCode;
+
+    StatusMessage(String message, int statusCode) {
+      this.message = message;
+      this.statusCode = statusCode;
+    }
+
+    public String getMessage() {
+      return message;
+    }
+
+    public int getStatusCode() {
+      return statusCode;
+    }
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
new file mode 100644
index 0000000..7023957
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.rest;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.apache.ambari.infra.manager.JobManager;
+import org.apache.ambari.infra.model.ExecutionContextResponse;
+import org.apache.ambari.infra.model.JobDetailsResponse;
+import org.apache.ambari.infra.model.JobExecutionDetailsResponse;
+import org.apache.ambari.infra.model.JobExecutionInfoResponse;
+import org.apache.ambari.infra.model.JobExecutionRequest;
+import org.apache.ambari.infra.model.JobExecutionRestartRequest;
+import org.apache.ambari.infra.model.JobExecutionStopRequest;
+import org.apache.ambari.infra.model.JobInstanceStartRequest;
+import org.apache.ambari.infra.model.JobRequest;
+import org.apache.ambari.infra.model.PageRequest;
+import org.apache.ambari.infra.model.StepExecutionContextResponse;
+import org.apache.ambari.infra.model.StepExecutionInfoResponse;
+import org.apache.ambari.infra.model.StepExecutionProgressResponse;
+import org.apache.ambari.infra.model.StepExecutionRequest;
+import org.springframework.batch.admin.service.NoSuchStepExecutionException;
+import org.springframework.batch.admin.web.JobInfo;
+import org.springframework.batch.core.JobParametersInvalidException;
+import org.springframework.batch.core.launch.JobExecutionNotRunningException;
+import org.springframework.batch.core.launch.JobInstanceAlreadyExistsException;
+import org.springframework.batch.core.launch.NoSuchJobException;
+import org.springframework.batch.core.launch.NoSuchJobExecutionException;
+import org.springframework.batch.core.launch.NoSuchJobInstanceException;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
+import org.springframework.batch.core.repository.JobRestartException;
+import org.springframework.context.annotation.Scope;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.validation.Valid;
+import javax.validation.constraints.NotNull;
+import javax.ws.rs.BeanParam;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import java.util.List;
+import java.util.Set;
+
+@Api(value = "jobs", description = "Job operations")
+@Path("jobs")
+@Named
+@Scope("request")
+public class JobResource {
+
+  @Inject
+  private JobManager jobManager;
+
+  @GET
+  @Produces({"application/json"})
+  @ApiOperation("Get all jobs")
+  public List<JobInfo> getAllJobs(@BeanParam @Valid PageRequest request) {
+    return jobManager.getAllJobs(request.getPage(), request.getSize());
+  }
+
+  @POST
+  @Produces({"application/json"})
+  @Path("{jobName}")
+  @ApiOperation("Start a new job instance by job name.")
+  public JobExecutionInfoResponse startJob(@BeanParam @Valid JobInstanceStartRequest request)
+    throws JobParametersInvalidException, JobInstanceAlreadyExistsException, NoSuchJobException, JobExecutionAlreadyRunningException,
+    JobRestartException, JobInstanceAlreadyCompleteException {
+    return jobManager.launchJob(request.getJobName(), request.getParams());
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @Path("/info/names")
+  @ApiOperation("Get all job names")
+  public Set<String> getAllJobNames() {
+    return jobManager.getAllJobNames();
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @Path("/info/{jobName}")
+  @ApiOperation("Get job details by job name.")
+  public JobDetailsResponse getJobDetails(@BeanParam @Valid JobRequest jobRequest) throws NoSuchJobException {
+    return jobManager.getJobDetails(jobRequest.getJobName(), jobRequest.getPage(), jobRequest.getSize());
+  }
+
+  @GET
+  @Path("{jobName}/executions")
+  @Produces({"application/json"})
+  @ApiOperation("Get the id values of all the running job instances.")
+  public Set<Long> getExecutionIdsByJobName(@PathParam("jobName") @NotNull @Valid String jobName) throws NoSuchJobException {
+    return jobManager.getExecutionIdsByJobName(jobName);
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @Path("/executions/{jobExecutionId}")
+  @ApiOperation("Get job and step details for job execution instance.")
+  public JobExecutionDetailsResponse getExectionInfo(@PathParam("jobExecutionId") @Valid Long jobExecutionId) throws NoSuchJobExecutionException {
+    return jobManager.getExectionInfo(jobExecutionId);
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @Path("/executions/{jobExecutionId}/context")
+  @ApiOperation("Get execution context for specific job.")
+  public ExecutionContextResponse getExecutionContextByJobExecId(@PathParam("jobExecutionId") Long executionId) throws NoSuchJobExecutionException {
+    return jobManager.getExecutionContextByJobExecutionId(executionId);
+  }
+
+
+  @DELETE
+  @Produces({"application/json"})
+  @Path("/executions/{jobExecutionId}")
+  @ApiOperation("Stop or abandon a running job execution.")
+  public JobExecutionInfoResponse stopOrAbandonJobExecution(@BeanParam @Valid JobExecutionStopRequest request)
+    throws NoSuchJobExecutionException, JobExecutionNotRunningException, JobExecutionAlreadyRunningException {
+    return jobManager.stopOrAbandonJobByExecutionId(request.getJobExecutionId(), request.getOperation());
+  }
+
+  @DELETE
+  @Produces({"application/json"})
+  @Path("/executions")
+  @ApiOperation("Stop all job executions.")
+  public Integer stopAll() {
+    return jobManager.stopAllJobs();
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @Path("/{jobName}/{jobInstanceId}/executions")
+  @ApiOperation("Get execution for job instance.")
+  public List<JobExecutionInfoResponse> getExecutionsForInstance(@BeanParam @Valid JobExecutionRequest request) throws JobInstanceAlreadyCompleteException,
+    NoSuchJobExecutionException, JobExecutionAlreadyRunningException, JobParametersInvalidException, JobRestartException, NoSuchJobException, NoSuchJobInstanceException {
+    return jobManager.getExecutionsForJobInstance(request.getJobName(), request.getJobInstanceId());
+  }
+
+  @POST
+  @Produces({"application/json"})
+  @Path("/{jobName}/{jobInstanceId}/executions")
+  @ApiOperation("Restart job instance.")
+  public JobExecutionInfoResponse restartJobInstance(@BeanParam @Valid JobExecutionRestartRequest request) throws JobInstanceAlreadyCompleteException,
+    NoSuchJobExecutionException, JobExecutionAlreadyRunningException, JobParametersInvalidException, JobRestartException, NoSuchJobException {
+    return jobManager.restart(request.getJobInstanceId(), request.getJobName(), request.getOperation());
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @Path("/executions/{jobExecutionId}/steps/{stepExecutionId}")
+  @ApiOperation("Get step execution details.")
+  public StepExecutionInfoResponse getStepExecution(@BeanParam @Valid StepExecutionRequest request) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
+    return jobManager.getStepExecution(request.getJobExecutionId(), request.getStepExecutionId());
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @Path("/executions/{jobExecutionId}/steps/{stepExecutionId}/execution-context")
+  @ApiOperation("Get the execution context of step execution.")
+  public StepExecutionContextResponse getStepExecutionContext(@BeanParam @Valid StepExecutionRequest request) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
+    return jobManager.getStepExecutionContext(request.getJobExecutionId(), request.getStepExecutionId());
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @Path("/executions/{jobExecutionId}/steps/{stepExecutionId}/progress")
+  @ApiOperation("Get progress of step execution.")
+  public StepExecutionProgressResponse getStepExecutionProgress(@BeanParam @Valid StepExecutionRequest request) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
+    return jobManager.getStepExecutionProgress(request.getJobExecutionId(), request.getStepExecutionId());
+  }
+
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/dummy/dummy.txt b/ambari-infra/ambari-infra-manager/src/main/resources/dummy/dummy.txt
new file mode 100644
index 0000000..41da725
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/dummy/dummy.txt
@@ -0,0 +1,3 @@
+f1,f2
+v1,v2
+v3,v4
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh
new file mode 100644
index 0000000..c7e11c3
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Extend with java options or system properties. e.g.: INFRA_MANAGER_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=5007,server=y,suspend=n"
+export INFRA_MANAGER_OPTS=""
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
new file mode 100644
index 0000000..fbeac78
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+infra-manager.batch.db.file=job-repository.db
+infra-manager.batch.db.init=true
+infra-manager.batch.db.username=admin
+infra-manager.batch.db.password=admin
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh b/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
new file mode 100644
index 0000000..65287b2
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+JVM="java"
+sdir="`dirname \"$0\"`"
+
+PATH=$JAVA_HOME/bin:$PATH nohup $JVM -classpath "/etc/ambari-infra-manager/conf:$sdir:$sdir/libs/*" $INFRA_MANAGER_OPTS org.apache.ambari.infra.InfraManager ${1+"$@"} &
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/log4j.xml b/ambari-infra/ambari-infra-manager/src/main/resources/log4j.xml
new file mode 100644
index 0000000..0450454
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/log4j.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+  <appender name="console" class="org.apache.log4j.ConsoleAppender">
+    <param name="Target" value="System.out" />
+    <layout class="org.apache.log4j.PatternLayout">
+      <param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n" />
+    </layout>
+  </appender>
+
+  <root>
+    <level value="INFO" />
+    <appender-ref ref="console" />
+  </root>
+</log4j:configuration>
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html b/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html
new file mode 100644
index 0000000..8580e1a
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html
@@ -0,0 +1,115 @@
+<!DOCTYPE html>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<html>
+<head>
+    <title>Infra Manager REST API</title>
+    <link rel="icon" type="image/png" href="images/favicon-32x32.png" sizes="32x32" />
+    <link rel="icon" type="image/png" href="images/favicon-16x16.png" sizes="16x16" />
+    <link href='css/typography.css' media='screen' rel='stylesheet' type='text/css'/>
+    <link href='css/reset.css' media='screen' rel='stylesheet' type='text/css'/>
+    <link href='css/screen.css' media='screen' rel='stylesheet' type='text/css'/>
+    <link href='css/reset.css' media='print' rel='stylesheet' type='text/css'/>
+    <link href='css/print.css' media='print' rel='stylesheet' type='text/css'/>
+    <script src='lib/jquery-1.8.0.min.js' type='text/javascript'></script>
+    <script src='lib/jquery.slideto.min.js' type='text/javascript'></script>
+    <script src='lib/jquery.wiggle.min.js' type='text/javascript'></script>
+    <script src='lib/jquery.ba-bbq.min.js' type='text/javascript'></script>
+    <script src='lib/handlebars-2.0.0.js' type='text/javascript'></script>
+    <script src='lib/underscore-min.js' type='text/javascript'></script>
+    <script src='lib/backbone-min.js' type='text/javascript'></script>
+    <script src='swagger-ui.js' type='text/javascript'></script>
+    <script src='lib/highlight.7.3.pack.js' type='text/javascript'></script>
+    <script src='lib/marked.js' type='text/javascript'></script>
+    <script src='lib/swagger-oauth.js' type='text/javascript'></script>
+
+    <script type="text/javascript">
+        $(function () {
+            var url = window.location.search.match(/url=([^&]+)/);
+            if (url && url.length > 1) {
+                url = decodeURIComponent(url[1]);
+            } else {
+                var urlPrefix = location.protocol +'//'+ location.hostname+(location.port ? ':'+location.port: '');
+                url = urlPrefix + "/api/v1/swagger.yaml";
+            }
+            window.swaggerUi = new SwaggerUi({
+                url: url,
+                dom_id: "swagger-ui-container",
+                supportedSubmitMethods: ['get', 'post', 'put', 'delete', 'patch'],
+                onComplete: function(swaggerApi, swaggerUi){
+                    if(typeof initOAuth == "function") {
+                        initOAuth({
+                            clientId: "your-client-id",
+                            realm: "your-realms",
+                            appName: "your-app-name"
+                        });
+                    }
+
+                    $('pre code').each(function(i, e) {
+                        hljs.highlightBlock(e)
+                    });
+
+                    addApiKeyAuthorization();
+                },
+                onFailure: function(data) {
+                    log("Unable to Load SwaggerUI");
+                },
+                docExpansion: "none",
+                apisSorter: "alpha",
+                showRequestHeaders: false
+            });
+
+            function addApiKeyAuthorization(){
+                var username = encodeURIComponent($('#input_username')[0].value);
+                var password = encodeURIComponent($('#input_password')[0].value);
+                if (username && username.trim() != "" && password && password != "") {
+                    var apiKeyAuth = new SwaggerClient.PasswordAuthorization("Authorization", username, password);
+                    window.swaggerUi.api.clientAuthorizations.add("key", apiKeyAuth);
+                    log("added authorization header: " + 'Basic ' + btoa(username + ':' + password));
+                }
+            }
+
+            $('#input_username, #input_password').change(addApiKeyAuthorization);
+
+            window.swaggerUi.load();
+
+            function log() {
+                if ('console' in window) {
+                    console.log.apply(console, arguments);
+                }
+            }
+        });
+    </script>
+</head>
+
+<body class="swagger-section">
+<div id='header'>
+    <div class="swagger-ui-wrap">
+        <a id="logo" href="http://swagger.io">swagger</a>
+        <form id='api_selector'>
+            <div class='input'><input placeholder="http://example.com/api" id="input_baseUrl" name="baseUrl" type="text"/></div>
+            <div class="input"><input placeholder="username" id="input_username" name="username" type="text" size="10"></div>
+            <div class="input"><input placeholder="password" id="input_password" name="password" type="password" size="10"></div>
+            <div class='input'><a id="explore" href="#">Explore</a></div>
+        </form>
+    </div>
+</div>
+
+<div id="message-bar" class="swagger-ui-wrap">&nbsp;</div>
+<div id="swagger-ui-container" class="swagger-ui-wrap"></div>
+</body>
+</html>
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/webapp/index.html b/ambari-infra/ambari-infra-manager/src/main/resources/webapp/index.html
new file mode 100644
index 0000000..3e64867
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/webapp/index.html
@@ -0,0 +1,24 @@
+<!DOCTYPE html>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<html>
+  <head>
+  </head>
+  <body>
+    <h1>Welcome!</h1>
+  </body>
+</html>
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
index d5d971c..9479679 100644
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
+++ b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
@@ -110,7 +110,7 @@
     List<String> collections = listCollections();
     if (!collections.contains(getCollection())) {
       String collection = new CreateCollectionCommand(getRetryTimes(), getInterval()).run(this);
-      LOG.info("Collection '{}' created.", collection);
+      LOG.info("Collection '{}' creation request sent.", collection);
     } else {
       LOG.info("Collection '{}' already exits.", getCollection());
       if (this.isSplitting()) {
@@ -234,7 +234,7 @@
       for (String shardName : shardList) {
         if (!existingShards.contains(shardName)) {
           new CreateShardCommand(shardName, getRetryTimes(), getInterval()).run(this);
-          LOG.info("New shard added to collection '{}': {}", getCollection(), shardName);
+          LOG.info("Adding new shard to collection request sent ('{}': {})", getCollection(), shardName);
           existingShards.add(shardName);
         }
       }
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/resources/solrCloudCli.sh b/ambari-infra/ambari-infra-solr-client/src/main/resources/solrCloudCli.sh
index cd47f06..7bfa864 100644
--- a/ambari-infra/ambari-infra-solr-client/src/main/resources/solrCloudCli.sh
+++ b/ambari-infra/ambari-infra-solr-client/src/main/resources/solrCloudCli.sh
@@ -17,4 +17,4 @@
 JVM="java"
 sdir="`dirname \"$0\"`"
 
-PATH=$JAVA_HOME/bin:$PATH $JVM -classpath "$sdir:$sdir/libs/*" org.apache.ambari.logsearch.solr.AmbariSolrCloudCLI ${1+"$@"}
\ No newline at end of file
+PATH=$JAVA_HOME/bin:$PATH $JVM -classpath "$sdir:$sdir/libs/*" org.apache.ambari.infra.solr.AmbariSolrCloudCLI ${1+"$@"}
\ No newline at end of file
diff --git a/ambari-infra/pom.xml b/ambari-infra/pom.xml
index a6a6961..a5a5b38 100644
--- a/ambari-infra/pom.xml
+++ b/ambari-infra/pom.xml
@@ -43,6 +43,7 @@
     <module>ambari-infra-assembly</module>
     <module>ambari-infra-solr-client</module>
     <module>ambari-infra-solr-plugin</module>
+    <module>ambari-infra-manager</module>
   </modules>
 
   <build>
diff --git a/ambari-logsearch/README.md b/ambari-logsearch/README.md
index 5c41fcd..d05f45a 100644
--- a/ambari-logsearch/README.md
+++ b/ambari-logsearch/README.md
@@ -36,10 +36,15 @@
 
 ## Running Integration Tests
 
-By default integration tests are not a part of the build process, you need to set ${it.skip} variable to true (docker needed here too)
+By default integration tests are not a part of the build process, you need to set -Dbackend-tests or -Dselenium-tests (or you can use -Dall-tests to run both). To run the tests you will need Docker here as well (right now docker-for-mac and unix are supported by default, for boot2docker you need to pass -Ddocker.host parameter to the build).
 
 ```bash
 # from ambari-logsearch folder
-mvn clean integration-test -Dit.skip=false
+mvn clean integration-test -Dbackend-tests failsafe:verify
+# or run selenium tests with docker for mac, but before that you need to start XQuartz
+xquartz
+# then in an another window you can start ui tests
+mvn clean integration-test -Dselenium-tests failsafe:verify
+# you can specify story file folder location with -Dbackend.stories.location and -Dui.stories.location (absolute file path) in the commands
 ```
 Also you can run from the IDE, but make sure all of the ambari logsearch modules are built.
diff --git a/ambari-logsearch/ambari-logsearch-config-api/.gitignore b/ambari-logsearch/ambari-logsearch-config-api/.gitignore
new file mode 100644
index 0000000..ae3c172
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/.gitignore
@@ -0,0 +1 @@
+/bin/
diff --git a/ambari-logsearch/ambari-logsearch-config-api/pom.xml b/ambari-logsearch/ambari-logsearch-config-api/pom.xml
new file mode 100644
index 0000000..5355906
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/pom.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <parent>
+    <artifactId>ambari-logsearch</artifactId>
+    <groupId>org.apache.ambari</groupId>
+    <version>2.0.0.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>ambari-logsearch-config-api</artifactId>
+  <packaging>jar</packaging>
+  <name>Ambari Logsearch Config Api</name>
+  <url>http://maven.apache.org</url>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>1.7.7</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/InputConfigMonitor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/InputConfigMonitor.java
new file mode 100644
index 0000000..746c14c
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/InputConfigMonitor.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.util.List;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig;
+
+/**
+ * Monitors input configuration changes.
+ */
+public interface InputConfigMonitor {
+  /**
+   * @return A list of json strings for all the global config jsons.
+   */
+  List<String> getGlobalConfigJsons();
+  
+  /**
+   * Notification of a new input configuration.
+   * 
+   * @param serviceName The name of the service for which the input configuration was created.
+   * @param inputConfig The input configuration.
+   * @throws Exception
+   */
+  void loadInputConfigs(String serviceName, InputConfig inputConfig) throws Exception;
+  
+  /**
+   * Notification of the removal of an input configuration.
+   * 
+   * @param serviceName The name of the service whose input configuration was removed.
+   */
+  void removeInputs(String serviceName);
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterMonitor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterMonitor.java
new file mode 100644
index 0000000..766f751
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterMonitor.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Monitors log level filter changes.
+ */
+package org.apache.ambari.logsearch.config.api;
+
+import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter;
+
+public interface LogLevelFilterMonitor {
+
+  /**
+   * Notification of a new or updated log level filter.
+   * 
+   * @param logId The log for which the log level filter was created/updated.
+   * @param logLevelFilter The log level filter to apply from now on to the log.
+   */
+  void setLogLevelFilter(String logId, LogLevelFilter logLevelFilter);
+
+  /**
+   * Notification of the removal of a log level filter.
+   * 
+   * @param logId The log whose log level filter was removed.
+   */
+  void removeLogLevelFilter(String logId);
+
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfig.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfig.java
new file mode 100644
index 0000000..4cbf21f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfig.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.io.Closeable;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter;
+import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig;
+
+/**
+ * Log Search Configuration, which uploads and retrieves configurations, and monitors their changes.
+ */
+public interface LogSearchConfig extends Closeable {
+  /**
+   * Enumeration of the components of the Log Search service.
+   */
+  public enum Component {
+    SERVER, LOGFEEDER;
+  }
+
+  /**
+   * Initialization of the configuration.
+   * 
+   * @param component The component which will use the configuration.
+   * @param properties The properties of that component.
+   * @throws Exception
+   */
+  void init(Component component, Map<String, String> properties) throws Exception;
+
+  /**
+   * Returns all the service names with input configurations of a cluster. Will be used only in SERVER mode.
+   * 
+   * @param clusterName The name of the cluster whose services are required.
+   * @return List of the service names.
+   */
+  List<String> getServices(String clusterName);
+
+  /**
+   * Checks if input configuration exists.
+   * 
+   * @param clusterName The name of the cluster where the service is looked for.
+   * @param serviceName The name of the service looked for.
+   * @return If input configuration exists for the service.
+   * @throws Exception
+   */
+  boolean inputConfigExists(String clusterName, String serviceName) throws Exception;
+
+  /**
+   * Returns the input configuration of a service in a cluster. Will be used only in SERVER mode.
+   * 
+   * @param clusterName The name of the cluster where the service is looked for.
+   * @param serviceName The name of the service looked for.
+   * @return The input configuration for the service if it exists, null otherwise.
+   */
+  InputConfig getInputConfig(String clusterName, String serviceName);
+
+  /**
+   * Uploads the input configuration for a service in a cluster.
+   * 
+   * @param clusterName The name of the cluster where the service is.
+   * @param serviceName The name of the service whose input configuration is uploaded.
+   * @param inputConfig The input configuration of the service.
+   * @throws Exception
+   */
+  void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception;
+
+  /**
+   * Modifies the input configuration for a service in a cluster.
+   * 
+   * @param clusterName The name of the cluster where the service is.
+   * @param serviceName The name of the service whose input configuration is uploaded.
+   * @param inputConfig The input configuration of the service.
+   * @throws Exception
+   */
+  void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception;
+
+  /**
+   * Uploads the log level filter of a log.
+   * 
+   * @param clusterName The name of the cluster where the log is.
+   * @param logId The id of the log.
+   * @param filter The log level filter for the log.
+   * @throws Exception 
+   */
+  void createLogLevelFilter(String clusterName, String logId, LogLevelFilter filter) throws Exception;
+
+  /**
+   * Modifies the log level filters for all the logs.
+   * 
+   * @param clusterName The name of the cluster where the logs are.
+   * @param filters The log level filters to set.
+   * @throws Exception
+   */
+  void setLogLevelFilters(String clusterName, LogLevelFilterMap filters) throws Exception;
+
+  /**
+   * Returns the Log Level Filters of a cluster.
+   * 
+   * @param clusterName The name of the cluster whose log level filters are required.
+   * @return All the log level filters of the cluster.
+   */
+  LogLevelFilterMap getLogLevelFilters(String clusterName);
+
+  /**
+   * Starts the monitoring of the input configurations, asynchronously. Will be used only in LOGFEEDER mode.
+   * 
+   * @param inputConfigMonitor The input config monitor to call in case of an input config change.
+   * @param logLevelFilterMonitor The log level filter monitor to call in case of a log level filter change.
+   * @throws Exception
+   */
+  void monitorInputConfigChanges(InputConfigMonitor inputConfigMonitor, LogLevelFilterMonitor logLevelFilterMonitor) throws Exception;
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactory.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactory.java
new file mode 100644
index 0000000..947e7e7
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactory.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.LogSearchConfig.Component;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Factory class for LogSearchConfig.
+ */
+public class LogSearchConfigFactory {
+  private static final Logger LOG = LoggerFactory.getLogger(LogSearchConfigFactory.class);
+
+  /**
+   * Creates a Log Search Configuration instance that implements {@link org.apache.ambari.logsearch.config.api.LogSearchConfig}.
+   * 
+   * @param component The component of the Log Search Service to create the configuration for (SERVER/LOGFEEDER).
+   * @param properties The properties of the component for which the configuration is created. If the properties contain the
+   *                  "logsearch.config.class" entry then the class defined there will be used instead of the default class.
+   * @param defaultClass The default configuration class to use if not specified otherwise.
+   * @return The Log Search Configuration instance.
+   * @throws Exception Throws exception if the defined class does not implement LogSearchConfig, or doesn't have an empty
+   *                   constructor, or throws an exception in its init method.
+   */
+  public static LogSearchConfig createLogSearchConfig(Component component, Map<String, String> properties,
+      Class<? extends LogSearchConfig> defaultClass) throws Exception {
+    try {
+      LogSearchConfig logSearchConfig = null;
+      String configClassName = properties.get("logsearch.config.class");
+      if (configClassName != null && !"".equals(configClassName.trim())) {
+        Class<?> clazz = Class.forName(configClassName);
+        if (LogSearchConfig.class.isAssignableFrom(clazz)) {
+          logSearchConfig = (LogSearchConfig) clazz.newInstance();
+        } else {
+          throw new IllegalArgumentException("Class " + configClassName + " does not implement the interface " + LogSearchConfig.class.getName());
+        }
+      } else {
+        logSearchConfig = defaultClass.newInstance();
+      }
+      
+      logSearchConfig.init(component, properties);
+      return logSearchConfig;
+    } catch (Exception e) {
+      LOG.error("Could not initialize logsearch config.", e);
+      throw e;
+    }
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Conditions.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Conditions.java
new file mode 100644
index 0000000..4da400a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Conditions.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface Conditions {
+  Fields getFields();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Fields.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Fields.java
new file mode 100644
index 0000000..5d34b1e
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Fields.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+import java.util.Set;
+
+public interface Fields {
+  Set<String> getType();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterDescriptor.java
new file mode 100644
index 0000000..632c6cb
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterDescriptor.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+import java.util.List;
+import java.util.Map;
+
+public interface FilterDescriptor {
+  String getFilter();
+
+  Conditions getConditions();
+
+  Integer getSortOrder();
+
+  String getSourceField();
+
+  Boolean isRemoveSourceField();
+
+  Map<String, ? extends List<? extends PostMapValues>> getPostMapValues();
+
+  Boolean isEnabled();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterGrokDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterGrokDescriptor.java
new file mode 100644
index 0000000..e85ce97
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterGrokDescriptor.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface FilterGrokDescriptor extends FilterDescriptor {
+  String getLog4jFormat();
+
+  String getMultilinePattern();
+
+  String getMessagePattern();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterJsonDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterJsonDescriptor.java
new file mode 100644
index 0000000..08f1893
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterJsonDescriptor.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface FilterJsonDescriptor extends FilterDescriptor {
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterKeyValueDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterKeyValueDescriptor.java
new file mode 100644
index 0000000..6edd140
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterKeyValueDescriptor.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface FilterKeyValueDescriptor extends FilterDescriptor {
+  String getFieldSplit();
+
+  String getValueSplit();
+
+  String getValueBorders();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputConfig.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputConfig.java
new file mode 100644
index 0000000..8126ac9
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputConfig.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+import java.util.List;
+
+public interface InputConfig {
+  List<? extends InputDescriptor> getInput();
+
+  List<? extends FilterDescriptor> getFilter();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputDescriptor.java
new file mode 100644
index 0000000..c41da93
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputDescriptor.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+import java.util.Map;
+
+public interface InputDescriptor {
+  String getType();
+
+  String getRowtype();
+
+  String getPath();
+
+  Map<String, String> getAddFields();
+
+  String getSource();
+
+  Boolean isTail();
+
+  Boolean isGenEventMd5();
+
+  Boolean isUseEventMd5AsId();
+
+  String getStartPosition();
+
+  Boolean isCacheEnabled();
+
+  String getCacheKeyField();
+
+  Boolean getCacheLastDedupEnabled();
+
+  Integer getCacheSize();
+
+  Long getCacheDedupInterval();
+
+  Boolean isEnabled();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileBaseDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileBaseDescriptor.java
new file mode 100644
index 0000000..a393dc7
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileBaseDescriptor.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface InputFileBaseDescriptor extends InputDescriptor {
+  Boolean getProcessFile();
+
+  Boolean getCopyFile();
+
+  Integer getCheckpointIntervalMs();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileDescriptor.java
new file mode 100644
index 0000000..0070ad9
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileDescriptor.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface InputFileDescriptor extends InputFileBaseDescriptor {
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputS3FileDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputS3FileDescriptor.java
new file mode 100644
index 0000000..b075629
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputS3FileDescriptor.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface InputS3FileDescriptor extends InputFileBaseDescriptor {
+  String getS3AccessKey();
+
+  String getS3SecretKey();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapDateDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapDateDescriptor.java
new file mode 100644
index 0000000..f88435f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapDateDescriptor.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface MapDateDescriptor extends MapFieldDescriptor {
+  String getSourceDatePattern();
+
+  String getTargetDatePattern();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldCopyDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldCopyDescriptor.java
new file mode 100644
index 0000000..596c173
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldCopyDescriptor.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface MapFieldCopyDescriptor extends MapFieldDescriptor {
+  String getCopyName();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldDescriptor.java
new file mode 100644
index 0000000..db086c5
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldDescriptor.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface MapFieldDescriptor {
+  String getJsonName();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldNameDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldNameDescriptor.java
new file mode 100644
index 0000000..da8cd0d
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldNameDescriptor.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface MapFieldNameDescriptor extends MapFieldDescriptor {
+  String getNewFieldName();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldValueDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldValueDescriptor.java
new file mode 100644
index 0000000..cf37e62
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldValueDescriptor.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+public interface MapFieldValueDescriptor extends MapFieldDescriptor {
+  String getPreValue();
+
+  String getPostValue();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/PostMapValues.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/PostMapValues.java
new file mode 100644
index 0000000..5be7287
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/PostMapValues.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api.model.inputconfig;
+
+import java.util.List;
+
+public interface PostMapValues {
+  List<MapFieldDescriptor> getMappers();
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilter.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilter.java
new file mode 100644
index 0000000..06cf589
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilter.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.config.api.model.loglevelfilter;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+public class LogLevelFilter {
+
+  private String label;
+  private List<String> hosts;
+  private List<String> defaultLevels;
+  private List<String> overrideLevels;
+  private Date expiryTime;
+
+  public LogLevelFilter() {
+    hosts = new ArrayList<String>();
+    defaultLevels = new ArrayList<String>();
+    overrideLevels = new ArrayList<String>();
+  }
+
+  public String getLabel() {
+    return label;
+  }
+
+  public void setLabel(String label) {
+    this.label = label;
+  }
+
+  public List<String> getHosts() {
+    return hosts;
+  }
+
+  public void setHosts(List<String> hosts) {
+    this.hosts = hosts;
+  }
+
+  public List<String> getDefaultLevels() {
+    return defaultLevels;
+  }
+
+  public void setDefaultLevels(List<String> defaultLevels) {
+    this.defaultLevels = defaultLevels;
+  }
+
+  public List<String> getOverrideLevels() {
+    return overrideLevels;
+  }
+
+  public void setOverrideLevels(List<String> overrideLevels) {
+    this.overrideLevels = overrideLevels;
+  }
+
+  public Date getExpiryTime() {
+    return expiryTime;
+  }
+
+  public void setExpiryTime(Date expiryTime) {
+    this.expiryTime = expiryTime;
+  }
+
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilterMap.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilterMap.java
new file mode 100644
index 0000000..37fdb9f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilterMap.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.config.api.model.loglevelfilter;
+
+import java.util.TreeMap;
+
+public class LogLevelFilterMap {
+  private TreeMap<String, LogLevelFilter> filter;
+
+  public TreeMap<String, LogLevelFilter> getFilter() {
+    return filter;
+  }
+
+  public void setFilter(TreeMap<String, LogLevelFilter> filter) {
+    this.filter = filter;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass1.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass1.java
new file mode 100644
index 0000000..d7e3c0a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass1.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.InputConfigMonitor;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter;
+import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig;
+
+public class LogSearchConfigClass1 implements LogSearchConfig {
+  @Override
+  public void init(Component component, Map<String, String> properties) {}
+
+  @Override
+  public boolean inputConfigExists(String clusterName, String serviceName) throws Exception {
+    return false;
+  }
+
+  @Override
+  public void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {}
+
+  @Override
+  public void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {}
+
+  @Override
+  public void monitorInputConfigChanges(InputConfigMonitor inputConfigMonitor, LogLevelFilterMonitor logLevelFilterMonitor)
+      throws Exception {}
+
+  @Override
+  public List<String> getServices(String clusterName) {
+    return null;
+  }
+
+  @Override
+  public InputConfig getInputConfig(String clusterName, String serviceName) {
+    return null;
+  }
+
+  @Override
+  public void createLogLevelFilter(String clusterName, String logId, LogLevelFilter filter) {}
+
+  @Override
+  public void setLogLevelFilters(String clusterName, LogLevelFilterMap filters) throws Exception {}
+
+  @Override
+  public LogLevelFilterMap getLogLevelFilters(String clusterName) {
+    return null;
+  }
+
+  @Override
+  public void close() {}
+}
\ No newline at end of file
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass2.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass2.java
new file mode 100644
index 0000000..198c133
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass2.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.InputConfigMonitor;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter;
+import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig;
+
+public class LogSearchConfigClass2 implements LogSearchConfig {
+  @Override
+  public void init(Component component, Map<String, String> properties) {}
+
+  @Override
+  public boolean inputConfigExists(String clusterName, String serviceName) throws Exception {
+    return false;
+  }
+
+  @Override
+  public void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {}
+
+  @Override
+  public void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {}
+
+  @Override
+  public void monitorInputConfigChanges(InputConfigMonitor inputConfigMonitor, LogLevelFilterMonitor logLevelFilterMonitor)
+      throws Exception {}
+
+  @Override
+  public List<String> getServices(String clusterName) {
+    return null;
+  }
+
+  @Override
+  public InputConfig getInputConfig(String clusterName, String serviceName) {
+    return null;
+  }
+
+  @Override
+  public void createLogLevelFilter(String clusterName, String logId, LogLevelFilter filter) {}
+
+  @Override
+  public void setLogLevelFilters(String clusterName, LogLevelFilterMap filters) throws Exception {}
+
+  @Override
+  public LogLevelFilterMap getLogLevelFilters(String clusterName) {
+    return null;
+  }
+
+  @Override
+  public void close() {}
+}
\ No newline at end of file
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactoryTest.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactoryTest.java
new file mode 100644
index 0000000..425694f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactoryTest.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.LogSearchConfig.Component;
+import org.junit.Test;
+
+import junit.framework.Assert;
+
+public class LogSearchConfigFactoryTest {
+
+  @Test
+  public void testDefaultConfig() throws Exception {
+    LogSearchConfig config = LogSearchConfigFactory.createLogSearchConfig(Component.SERVER,
+        Collections.<String, String> emptyMap(), LogSearchConfigClass1.class);
+    
+    Assert.assertSame(config.getClass(), LogSearchConfigClass1.class);
+  }
+
+  @Test
+  public void testCustomConfig() throws Exception {
+    Map<String, String> logsearchConfClassMap = new HashMap<>();
+    logsearchConfClassMap.put("logsearch.config.class", "org.apache.ambari.logsearch.config.api.LogSearchConfigClass2");
+    LogSearchConfig config = LogSearchConfigFactory.createLogSearchConfig(Component.SERVER,
+      logsearchConfClassMap, LogSearchConfigClass1.class);
+    
+    Assert.assertSame(config.getClass(), LogSearchConfigClass2.class);
+  }
+  
+  @Test(expected = IllegalArgumentException.class)
+  public void testNonConfigClass() throws Exception {
+    Map<String, String> logsearchConfClassMap = new HashMap<>();
+    logsearchConfClassMap.put("logsearch.config.class", "org.apache.ambari.logsearch.config.api.NonLogSearchConfigClass");
+    LogSearchConfigFactory.createLogSearchConfig(Component.SERVER,
+      logsearchConfClassMap, LogSearchConfigClass1.class);
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/NonLogSearchConfigClass.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/NonLogSearchConfigClass.java
new file mode 100644
index 0000000..9564f33
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/NonLogSearchConfigClass.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+public class NonLogSearchConfigClass {
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/resources/log4j.xml b/ambari-logsearch/ambari-logsearch-config-api/src/test/resources/log4j.xml
new file mode 100644
index 0000000..6d968f9
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/test/resources/log4j.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+  <appender name="console" class="org.apache.log4j.ConsoleAppender">
+    <param name="Target" value="System.out" />
+    <layout class="org.apache.log4j.PatternLayout">
+      <param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n" />
+      <!-- <param name="ConversionPattern" value="%d [%t] %-5p %c %x - %m%n"/> -->
+    </layout>
+  </appender>
+
+  <root>
+    <priority value="warn" />
+    <appender-ref ref="console" />
+  </root>
+
+</log4j:configuration>
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/.gitignore b/ambari-logsearch/ambari-logsearch-config-zookeeper/.gitignore
new file mode 100644
index 0000000..ae3c172
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/.gitignore
@@ -0,0 +1 @@
+/bin/
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/pom.xml b/ambari-logsearch/ambari-logsearch-config-zookeeper/pom.xml
new file mode 100644
index 0000000..7ecda60
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/pom.xml
@@ -0,0 +1,84 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <parent>
+    <artifactId>ambari-logsearch</artifactId>
+    <groupId>org.apache.ambari</groupId>
+    <version>2.0.0.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>ambari-logsearch-config-zookeeper</artifactId>
+  <packaging>jar</packaging>
+  <name>Ambari Logsearch Config Zookeeper</name>
+  <url>http://maven.apache.org</url>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-config-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.4</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-collections</groupId>
+      <artifactId>commons-collections</artifactId>
+      <version>3.2.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>3.4.9</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-framework</artifactId>
+      <version>2.12.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-client</artifactId>
+      <version>2.12.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-recipes</artifactId>
+      <version>2.12.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>1.7.7</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.6.2</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZK.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZK.java
new file mode 100644
index 0000000..4d10a5b
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZK.java
@@ -0,0 +1,362 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter;
+import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputAdapter;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputConfigGson;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputConfigImpl;
+import org.apache.ambari.logsearch.config.api.InputConfigMonitor;
+import org.apache.ambari.logsearch.config.api.LogLevelFilterMonitor;
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.framework.recipes.cache.ChildData;
+import org.apache.curator.framework.recipes.cache.TreeCache;
+import org.apache.curator.framework.recipes.cache.TreeCacheEvent;
+import org.apache.curator.framework.recipes.cache.TreeCacheEvent.Type;
+import org.apache.curator.framework.recipes.cache.TreeCacheListener;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.curator.utils.ZKPaths;
+import org.apache.zookeeper.KeeperException.NodeExistsException;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Splitter;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+
+public class LogSearchConfigZK implements LogSearchConfig {
+  private static final Logger LOG = LoggerFactory.getLogger(LogSearchConfigZK.class);
+
+  private static final int SESSION_TIMEOUT = 15000;
+  private static final int CONNECTION_TIMEOUT = 30000;
+  private static final String DEFAULT_ZK_ROOT = "/logsearch";
+  private static final long WAIT_FOR_ROOT_SLEEP_SECONDS = 10;
+  private static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS";
+
+  private static final String CLUSTER_NAME_PROPERTY = "cluster.name";
+  private static final String ZK_CONNECT_STRING_PROPERTY = "logsearch.config.zk_connect_string";
+  private static final String ZK_ACLS_PROPERTY = "logsearch.config.zk_acls";
+  private static final String ZK_ROOT_NODE_PROPERTY = "logsearch.config.zk_root";
+
+  private Map<String, String> properties;
+  private String root;
+  private CuratorFramework client;
+  private TreeCache cache;
+  private Gson gson;
+
+  @Override
+  public void init(Component component, Map<String, String> properties) throws Exception {
+    this.properties = properties;
+    
+    LOG.info("Connecting to ZooKeeper at " + properties.get(ZK_CONNECT_STRING_PROPERTY));
+    client = CuratorFrameworkFactory.builder()
+        .connectString(properties.get(ZK_CONNECT_STRING_PROPERTY))
+        .retryPolicy(new ExponentialBackoffRetry(1000, 3))
+        .connectionTimeoutMs(CONNECTION_TIMEOUT)
+        .sessionTimeoutMs(SESSION_TIMEOUT)
+        .build();
+    client.start();
+
+    root = MapUtils.getString(properties, ZK_ROOT_NODE_PROPERTY, DEFAULT_ZK_ROOT);
+
+    if (component == Component.SERVER) {
+      if (client.checkExists().forPath(root) == null) {
+        client.create().creatingParentContainersIfNeeded().forPath(root);
+      }
+      cache = new TreeCache(client, root);
+      cache.start();
+    } else {
+      while (client.checkExists().forPath(root) == null) {
+        LOG.info("Root node is not present yet, going to sleep for " + WAIT_FOR_ROOT_SLEEP_SECONDS + " seconds");
+        Thread.sleep(WAIT_FOR_ROOT_SLEEP_SECONDS * 1000);
+      }
+
+      cache = new TreeCache(client, String.format("%s/%s", root, properties.get(CLUSTER_NAME_PROPERTY)));
+    }
+    
+    gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
+  }
+
+  @Override
+  public boolean inputConfigExists(String clusterName, String serviceName) throws Exception {
+    String nodePath = root + "/" + clusterName + "/input/" + serviceName;
+    return cache.getCurrentData(nodePath) != null;
+  }
+
+  @Override
+  public void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {
+    String nodePath = String.format("%s/%s/input/%s", root, clusterName, serviceName);
+    try {
+      client.create().creatingParentContainersIfNeeded().withACL(getAcls()).forPath(nodePath, inputConfig.getBytes());
+      LOG.info("Uploaded input config for the service " + serviceName + " for cluster " + clusterName);
+    } catch (NodeExistsException e) {
+      LOG.debug("Did not upload input config for service " + serviceName + " as it was already uploaded by another Log Feeder");
+    }
+  }
+
+  @Override
+  public void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {
+    String nodePath = String.format("%s/%s/input/%s", root, clusterName, serviceName);
+    client.setData().forPath(nodePath, inputConfig.getBytes());
+    LOG.info("Set input config for the service " + serviceName + " for cluster " + clusterName);
+  }
+
+  @Override
+  public void monitorInputConfigChanges(final InputConfigMonitor inputConfigMonitor,
+      final LogLevelFilterMonitor logLevelFilterMonitor) throws Exception {
+    final JsonParser parser = new JsonParser();
+    final JsonArray globalConfigNode = new JsonArray();
+    for (String globalConfigJsonString : inputConfigMonitor.getGlobalConfigJsons()) {
+      JsonElement globalConfigJson = parser.parse(globalConfigJsonString);
+      globalConfigNode.add(globalConfigJson.getAsJsonObject().get("global"));
+    }
+    
+    createGlobalConfigNode(globalConfigNode);
+    
+    TreeCacheListener listener = new TreeCacheListener() {
+      public void childEvent(CuratorFramework client, TreeCacheEvent event) throws Exception {
+        String nodeName = ZKPaths.getNodeFromPath(event.getData().getPath());
+        String nodeData = new String(event.getData().getData());
+        Type eventType = event.getType();
+        
+        String configPathStab = String.format("%s/%s/", root, properties.get(CLUSTER_NAME_PROPERTY));
+        
+        if (event.getData().getPath().startsWith(configPathStab + "input/")) {
+          handleInputConfigChange(eventType, nodeName, nodeData);
+        } else if (event.getData().getPath().startsWith(configPathStab + "loglevelfilter/")) {
+          handleLogLevelFilterChange(eventType, nodeName, nodeData);
+        }
+      }
+
+      private void handleInputConfigChange(Type eventType, String nodeName, String nodeData) {
+        switch (eventType) {
+          case NODE_ADDED:
+            LOG.info("Node added under input ZK node: " + nodeName);
+            addInputs(nodeName, nodeData);
+            break;
+          case NODE_UPDATED:
+            LOG.info("Node updated under input ZK node: " + nodeName);
+            removeInputs(nodeName);
+            addInputs(nodeName, nodeData);
+            break;
+          case NODE_REMOVED:
+            LOG.info("Node removed from input ZK node: " + nodeName);
+            removeInputs(nodeName);
+            break;
+          default:
+            break;
+        }
+      }
+
+      private void removeInputs(String serviceName) {
+        inputConfigMonitor.removeInputs(serviceName);
+      }
+
+      private void addInputs(String serviceName, String inputConfig) {
+        try {
+          JsonElement inputConfigJson = parser.parse(inputConfig);
+          for (Map.Entry<String, JsonElement> typeEntry : inputConfigJson.getAsJsonObject().entrySet()) {
+            for (JsonElement e : typeEntry.getValue().getAsJsonArray()) {
+              for (JsonElement globalConfig : globalConfigNode) {
+                merge(globalConfig.getAsJsonObject(), e.getAsJsonObject());
+              }
+            }
+          }
+          
+          inputConfigMonitor.loadInputConfigs(serviceName, InputConfigGson.gson.fromJson(inputConfigJson, InputConfigImpl.class));
+        } catch (Exception e) {
+          LOG.error("Could not load input configuration for service " + serviceName + ":\n" + inputConfig, e);
+        }
+      }
+
+      private void handleLogLevelFilterChange(Type eventType, String nodeName, String nodeData) {
+        switch (eventType) {
+          case NODE_ADDED:
+          case NODE_UPDATED:
+            LOG.info("Node added/updated under loglevelfilter ZK node: " + nodeName);
+            LogLevelFilter logLevelFilter = gson.fromJson(nodeData, LogLevelFilter.class);
+            logLevelFilterMonitor.setLogLevelFilter(nodeName, logLevelFilter);
+            break;
+          case NODE_REMOVED:
+            LOG.info("Node removed loglevelfilter input ZK node: " + nodeName);
+            logLevelFilterMonitor.removeLogLevelFilter(nodeName);
+            break;
+          default:
+            break;
+        }
+      }
+
+      private void merge(JsonObject source, JsonObject target) {
+        for (Map.Entry<String, JsonElement> e : source.entrySet()) {
+          if (!target.has(e.getKey())) {
+            target.add(e.getKey(), e.getValue());
+          } else {
+            if (e.getValue().isJsonObject()) {
+              JsonObject valueJson = (JsonObject)e.getValue();
+              merge(valueJson, target.get(e.getKey()).getAsJsonObject());
+            }
+          }
+        }
+      }
+    };
+    cache.getListenable().addListener(listener);
+    cache.start();
+  }
+
+  private void createGlobalConfigNode(JsonArray globalConfigNode) {
+    String globalConfigNodePath = String.format("%s/%s/global", root, properties.get(CLUSTER_NAME_PROPERTY));
+    String data = InputConfigGson.gson.toJson(globalConfigNode);
+    
+    try {
+      if (cache.getCurrentData(globalConfigNodePath) != null) {
+        client.setData().forPath(globalConfigNodePath, data.getBytes());
+      } else {
+        client.create().creatingParentContainersIfNeeded().withACL(getAcls()).forPath(globalConfigNodePath, data.getBytes());
+      }
+    } catch (Exception e) {
+      LOG.warn("Exception during global config node creation/update", e);
+    }
+  }
+
+  @Override
+  public List<String> getServices(String clusterName) {
+    String parentPath = String.format("%s/%s/input", root, clusterName);
+    Map<String, ChildData> serviceNodes = cache.getCurrentChildren(parentPath);
+    return new ArrayList<String>(serviceNodes.keySet());
+  }
+
+  @Override
+  public InputConfig getInputConfig(String clusterName, String serviceName) {
+    String globalConfigNodePath = String.format("%s/%s/global", root, clusterName);
+    String globalConfigData = new String(cache.getCurrentData(globalConfigNodePath).getData());
+    JsonArray globalConfigs = (JsonArray) new JsonParser().parse(globalConfigData);
+    InputAdapter.setGlobalConfigs(globalConfigs);
+    
+    ChildData childData = cache.getCurrentData(String.format("%s/%s/input/%s", root, clusterName, serviceName));
+    return childData == null ? null : InputConfigGson.gson.fromJson(new String(childData.getData()), InputConfigImpl.class);
+  }
+
+  @Override
+  public void createLogLevelFilter(String clusterName, String logId, LogLevelFilter filter) throws Exception {
+    String nodePath = String.format("%s/%s/loglevelfilter/%s", root, clusterName, logId);
+    String logLevelFilterJson = gson.toJson(filter);
+    try {
+      client.create().creatingParentContainersIfNeeded().withACL(getAcls()).forPath(nodePath, logLevelFilterJson.getBytes());
+      LOG.info("Uploaded log level filter for the log " + logId + " for cluster " + clusterName);
+    } catch (NodeExistsException e) {
+      LOG.debug("Did not upload log level filters for log " + logId + " as it was already uploaded by another Log Feeder");
+    }
+  }
+
+  @Override
+  public void setLogLevelFilters(String clusterName, LogLevelFilterMap filters) throws Exception {
+    for (Map.Entry<String, LogLevelFilter> e : filters.getFilter().entrySet()) {
+      String nodePath = String.format("%s/%s/loglevelfilter/%s", root, clusterName, e.getKey());
+      String logLevelFilterJson = gson.toJson(e.getValue());
+      String currentLogLevelFilterJson = new String(cache.getCurrentData(nodePath).getData());
+      if (!logLevelFilterJson.equals(currentLogLevelFilterJson)) {
+        client.setData().forPath(nodePath, logLevelFilterJson.getBytes());
+        LOG.info("Set log level filter for the log " + e.getKey() + " for cluster " + clusterName);
+      }
+    }
+  }
+
+  @Override
+  public LogLevelFilterMap getLogLevelFilters(String clusterName) {
+    String parentPath = String.format("%s/%s/loglevelfilter", root, clusterName);
+    Map<String, ChildData> logLevelFilterNodes = cache.getCurrentChildren(parentPath);
+    TreeMap<String, LogLevelFilter> filters = new TreeMap<>();
+    for (Map.Entry<String, ChildData> e : logLevelFilterNodes.entrySet()) {
+      LogLevelFilter logLevelFilter = gson.fromJson(new String(e.getValue().getData()), LogLevelFilter.class);
+      filters.put(e.getKey(), logLevelFilter);
+    }
+    
+    LogLevelFilterMap logLevelFilters = new LogLevelFilterMap();
+    logLevelFilters.setFilter(filters);
+    return logLevelFilters;
+  }
+
+  private List<ACL> getAcls() {
+    String aclStr = properties.get(ZK_ACLS_PROPERTY);
+    if (StringUtils.isBlank(aclStr)) {
+      return ZooDefs.Ids.OPEN_ACL_UNSAFE;
+    }
+
+    List<ACL> acls = new ArrayList<>();
+    List<String> aclStrList = Splitter.on(",").omitEmptyStrings().trimResults().splitToList(aclStr);
+    for (String unparcedAcl : aclStrList) {
+      String[] parts = unparcedAcl.split(":");
+      if (parts.length == 3) {
+        acls.add(new ACL(parsePermission(parts[2]), new Id(parts[0], parts[1])));
+      }
+    }
+    return acls;
+  }
+
+  private Integer parsePermission(String permission) {
+    int permissionCode = 0;
+    for (char each : permission.toLowerCase().toCharArray()) {
+      switch (each) {
+        case 'r':
+          permissionCode |= ZooDefs.Perms.READ;
+          break;
+        case 'w':
+          permissionCode |= ZooDefs.Perms.WRITE;
+          break;
+        case 'c':
+          permissionCode |= ZooDefs.Perms.CREATE;
+          break;
+        case 'd':
+          permissionCode |= ZooDefs.Perms.DELETE;
+          break;
+        case 'a':
+          permissionCode |= ZooDefs.Perms.ADMIN;
+          break;
+        default:
+          throw new IllegalArgumentException("Unsupported permission: " + permission);
+      }
+    }
+    return permissionCode;
+  }
+
+  @Override
+  public void close() {
+    LOG.info("Closing ZooKeeper Connection");
+    client.close();
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/ConditionsImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/ConditionsImpl.java
new file mode 100644
index 0000000..8bbff8f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/ConditionsImpl.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.Conditions;
+
+import com.google.gson.annotations.Expose;
+
/**
 * Gson-mapped implementation of {@link Conditions}: holds the "fields"
 * attribute of a filter descriptor's conditions object.
 *
 * Field order is left untouched: Gson serializes fields in declaration order,
 * and serialized forms are compared as strings elsewhere in this module.
 */
public class ConditionsImpl implements Conditions {
  // Marked @Expose so it participates in Gson (de)serialization.
  @Expose
  private FieldsImpl fields;

  public FieldsImpl getFields() {
    return fields;
  }

  public void setFields(FieldsImpl fields) {
    this.fields = fields;
  }
}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FieldsImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FieldsImpl.java
new file mode 100644
index 0000000..68cd0e2
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FieldsImpl.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import java.util.Set;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.Fields;
+
+import com.google.gson.annotations.Expose;
+
/**
 * Gson-mapped implementation of {@link Fields}: a set of "type" strings read
 * from the JSON "type" attribute.
 */
public class FieldsImpl implements Fields {
  // Marked @Expose so it participates in Gson (de)serialization.
  @Expose
  private Set<String> type;

  public Set<String> getType() {
    return type;
  }

  public void setType(Set<String> type) {
    this.type = type;
  }
}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterAdapter.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterAdapter.java
new file mode 100644
index 0000000..b84403b
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterAdapter.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import java.lang.reflect.Type;
+
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+
+public class FilterAdapter implements JsonDeserializer<FilterDescriptorImpl> {
+  @Override
+  public FilterDescriptorImpl deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) {
+    switch (json.getAsJsonObject().get("filter").getAsString()) {
+      case "grok":
+        return (FilterDescriptorImpl)context.deserialize(json, FilterGrokDescriptorImpl.class);
+      case "keyvalue":
+        return (FilterDescriptorImpl)context.deserialize(json, FilterKeyValueDescriptorImpl.class);
+      case "json":
+        return (FilterDescriptorImpl)context.deserialize(json, FilterJsonDescriptorImpl.class);
+      default:
+        throw new IllegalArgumentException("Unknown filter type: " + json.getAsJsonObject().get("filter").getAsString());
+    }
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterDescriptorImpl.java
new file mode 100644
index 0000000..4e11715
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterDescriptorImpl.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.PostMapValues;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
/**
 * Abstract Gson-mapped base for filter descriptors ({@link FilterDescriptor}),
 * carrying the attributes common to every filter type ("grok", "keyvalue",
 * "json" — see FilterAdapter).
 *
 * Field declaration order is deliberately preserved: Gson serializes fields in
 * declaration order and serialized forms are compared as strings elsewhere.
 */
public abstract class FilterDescriptorImpl implements FilterDescriptor {
  // The filter type discriminator; JSON attribute "filter".
  @Expose
  private String filter;

  // Optional conditions restricting the filter; JSON attribute "conditions".
  @Expose
  private ConditionsImpl conditions;

  @Expose
  @SerializedName("sort_order")
  private Integer sortOrder;

  @Expose
  @SerializedName("source_field")
  private String sourceField;

  @Expose
  @SerializedName("remove_source_field")
  private Boolean removeSourceField;

  // Post-processing value mappers keyed by field name; JSON "post_map_values".
  @Expose
  @SerializedName("post_map_values")
  private Map<String, List<PostMapValuesImpl>> postMapValues;

  @Expose
  @SerializedName("is_enabled")
  private Boolean isEnabled;

  public String getFilter() {
    return filter;
  }

  public void setFilter(String filter) {
    this.filter = filter;
  }

  public ConditionsImpl getConditions() {
    return conditions;
  }

  public void setConditions(ConditionsImpl conditions) {
    this.conditions = conditions;
  }

  public Integer getSortOrder() {
    return sortOrder;
  }

  public void setSortOrder(Integer sortOrder) {
    this.sortOrder = sortOrder;
  }

  public String getSourceField() {
    return sourceField;
  }

  public void setSourceField(String sourceField) {
    this.sourceField = sourceField;
  }

  // May return null when the attribute is absent from the JSON (boxed Boolean).
  public Boolean isRemoveSourceField() {
    return removeSourceField;
  }

  public void setRemoveSourceField(Boolean removeSourceField) {
    this.removeSourceField = removeSourceField;
  }

  public Map<String, ? extends List<? extends PostMapValues>> getPostMapValues() {
    return postMapValues;
  }

  public void setPostMapValues(Map<String, List<PostMapValuesImpl>> postMapValues) {
    this.postMapValues = postMapValues;
  }

  // May return null when the attribute is absent from the JSON (boxed Boolean).
  public Boolean isEnabled() {
    return isEnabled;
  }

  public void setIsEnabled(Boolean isEnabled) {
    this.isEnabled = isEnabled;
  }
}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterGrokDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterGrokDescriptorImpl.java
new file mode 100644
index 0000000..7f40b7f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterGrokDescriptorImpl.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterGrokDescriptor;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
/**
 * Gson-mapped descriptor for the "grok" filter type
 * ({@link FilterGrokDescriptor}): log4j format, multiline pattern and message
 * pattern attributes.
 */
public class FilterGrokDescriptorImpl extends FilterDescriptorImpl implements FilterGrokDescriptor {
  @Expose
  @SerializedName("log4j_format")
  private String log4jFormat;

  @Expose
  @SerializedName("multiline_pattern")
  private String multilinePattern;

  @Expose
  @SerializedName("message_pattern")
  private String messagePattern;

  @Override
  public String getLog4jFormat() {
    return log4jFormat;
  }

  public void setLog4jFormat(String log4jFormat) {
    this.log4jFormat = log4jFormat;
  }

  @Override
  public String getMultilinePattern() {
    return multilinePattern;
  }

  public void setMultilinePattern(String multilinePattern) {
    this.multilinePattern = multilinePattern;
  }

  @Override
  public String getMessagePattern() {
    return messagePattern;
  }

  public void setMessagePattern(String messagePattern) {
    this.messagePattern = messagePattern;
  }
}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterJsonDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterJsonDescriptorImpl.java
new file mode 100644
index 0000000..9bf1a2b
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterJsonDescriptorImpl.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterJsonDescriptor;
+
/**
 * Descriptor for the "json" filter type ({@link FilterJsonDescriptor}).
 * A marker subclass: it adds no attributes beyond FilterDescriptorImpl.
 */
public class FilterJsonDescriptorImpl extends FilterDescriptorImpl implements FilterJsonDescriptor {
}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterKeyValueDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterKeyValueDescriptorImpl.java
new file mode 100644
index 0000000..8e89990
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/FilterKeyValueDescriptorImpl.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterKeyValueDescriptor;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
+public class FilterKeyValueDescriptorImpl extends FilterDescriptorImpl implements FilterKeyValueDescriptor {
+  @Expose
+  @SerializedName("field_split")
+  private String fieldSplit;
+
+  @Expose
+  @SerializedName("value_split")
+  private String valueSplit;
+
+  @Expose
+  @SerializedName("value_borders")
+  private String valueBorders;
+
+  public String getFieldSplit() {
+    return fieldSplit;
+  }
+
+  public void setFieldSplit(String fieldSplit) {
+    this.fieldSplit = fieldSplit;
+  }
+
+  public String getValueSplit() {
+    return valueSplit;
+  }
+
+  public void setValueSplit(String valueSplit) {
+    this.valueSplit = valueSplit;
+  }
+
+  public String getValueBorders() {
+    return valueBorders;
+  }
+
+  public void setValueBorders(String valueBorders) {
+    this.valueBorders = valueBorders;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputAdapter.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputAdapter.java
new file mode 100644
index 0000000..86741c6
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputAdapter.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import java.lang.reflect.Type;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+
+public class InputAdapter implements JsonDeserializer<InputDescriptorImpl> {
+  private static JsonArray globalConfigs;
+  public static void setGlobalConfigs(JsonArray globalConfigs_) {
+    globalConfigs = globalConfigs_;
+  }
+  
+  @Override
+  public InputDescriptorImpl deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) {
+    String source = null;
+    if (json.getAsJsonObject().has("source")) {
+      source = json.getAsJsonObject().get("source").getAsString();
+    } else {
+      for (JsonElement e : globalConfigs) {
+        if (e.getAsJsonObject().has("source")) {
+          source = e.getAsJsonObject().get("source").getAsString();
+          break;
+        }
+      }
+    }
+    
+    switch (source == null ? "" : source) { // guard: switching on a null String would throw NPE instead of the intended error below
+      case "file":
+        return (InputDescriptorImpl)context.deserialize(json, InputFileDescriptorImpl.class);
+      case "s3_file":
+        return (InputDescriptorImpl)context.deserialize(json, InputS3FileDescriptorImpl.class);
+      default:
+        throw new IllegalArgumentException("Unknown input type: " + source); // use resolved value; re-reading json NPEs when "source" came from globalConfigs
+    }
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputConfigGson.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputConfigGson.java
new file mode 100644
index 0000000..3b78aff
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputConfigGson.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import java.lang.reflect.Type;
+import java.util.List;
+
+import com.google.common.reflect.TypeToken;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+/**
+ * Helper class to convert between a JSON string and the InputConfig class.
+ */
+public class InputConfigGson {
+  public static Gson gson;
+  static {
+    Type inputType = new TypeToken<InputDescriptorImpl>() {}.getType();
+    Type filterType = new TypeToken<FilterDescriptorImpl>() {}.getType();
+    Type postMapValuesType = new TypeToken<List<PostMapValuesImpl>>() {}.getType();
+    gson = new GsonBuilder()
+        .registerTypeAdapter(inputType, new InputAdapter())
+        .registerTypeAdapter(filterType, new FilterAdapter())
+        .registerTypeAdapter(postMapValuesType, new PostMapValuesAdapter())
+        .setPrettyPrinting()
+        .excludeFieldsWithoutExposeAnnotation()
+        .create();
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputConfigImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputConfigImpl.java
new file mode 100644
index 0000000..a4eba8e
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputConfigImpl.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import java.util.List;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor;
+
+import com.google.gson.annotations.Expose;
+
+public class InputConfigImpl implements InputConfig {
+  @Expose
+  private List<InputDescriptorImpl> input;
+
+  @Expose
+  private List<FilterDescriptorImpl> filter;
+
+  @Override
+  public List<? extends InputDescriptor> getInput() {
+    return input;
+  }
+
+  public void setInput(List<InputDescriptorImpl> input) {
+    this.input = input;
+  }
+
+  @Override
+  public List<? extends FilterDescriptor> getFilter() {
+    return filter;
+  }
+
+  public void setFilter(List<FilterDescriptorImpl> filter) {
+    this.filter = filter;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputDescriptorImpl.java
new file mode 100644
index 0000000..94dcc2a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputDescriptorImpl.java
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
+public abstract class InputDescriptorImpl implements InputDescriptor {
+  @Expose
+  private String type;
+
+  @Expose
+  private String rowtype;
+
+  @Expose
+  private String path;
+
+  @Expose
+  @SerializedName("add_fields")
+  private Map<String, String> addFields;
+  
+  @Expose
+  private String source;
+  
+  @Expose
+  private Boolean tail;
+  
+  @Expose
+  @SerializedName("gen_event_md5")
+  private Boolean genEventMd5;
+  
+  @Expose
+  @SerializedName("use_event_md5_as_id")
+  private Boolean useEventMd5AsId;
+  
+  @Expose
+  @SerializedName("start_position")
+  private String startPosition;
+
+  @Expose
+  @SerializedName("cache_enabled")
+  private Boolean cacheEnabled;
+
+  @Expose
+  @SerializedName("cache_key_field")
+  private String cacheKeyField;
+
+  @Expose
+  @SerializedName("cache_last_dedup_enabled")
+  private Boolean cacheLastDedupEnabled;
+
+  @Expose
+  @SerializedName("cache_size")
+  private Integer cacheSize;
+
+  @Expose
+  @SerializedName("cache_dedup_interval")
+  private Long cacheDedupInterval;
+
+  @Expose
+  @SerializedName("is_enabled")
+  private Boolean isEnabled;
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public String getRowtype() {
+    return rowtype;
+  }
+
+  public void setRowtype(String rowType) {
+    this.rowtype = rowType;
+  }
+
+  public String getPath() {
+    return path;
+  }
+
+  public void setPath(String path) {
+    this.path = path;
+  }
+
+  public Map<String, String> getAddFields() {
+    return addFields;
+  }
+
+  public void setAddFields(Map<String, String> addFields) {
+    this.addFields = addFields;
+  }
+
+  public String getSource() {
+    return source;
+  }
+
+  public void setSource(String source) {
+    this.source = source;
+  }
+
+  public Boolean isTail() {
+    return tail;
+  }
+
+  public void setTail(Boolean tail) {
+    this.tail = tail;
+  }
+
+  public Boolean isGenEventMd5() {
+    return genEventMd5;
+  }
+
+  public void setGenEventMd5(Boolean genEventMd5) {
+    this.genEventMd5 = genEventMd5;
+  }
+
+  public Boolean isUseEventMd5AsId() {
+    return useEventMd5AsId;
+  }
+
+  public void setUseEventMd5AsId(Boolean useEventMd5AsId) {
+    this.useEventMd5AsId = useEventMd5AsId;
+  }
+
+  public String getStartPosition() {
+    return startPosition;
+  }
+
+  public void setStartPosition(String startPosition) {
+    this.startPosition = startPosition;
+  }
+
+  public Boolean isCacheEnabled() {
+    return cacheEnabled;
+  }
+
+  public void setCacheEnabled(Boolean cacheEnabled) {
+    this.cacheEnabled = cacheEnabled;
+  }
+
+  public String getCacheKeyField() {
+    return cacheKeyField;
+  }
+
+  public void setCacheKeyField(String cacheKeyField) {
+    this.cacheKeyField = cacheKeyField;
+  }
+
+  public Boolean getCacheLastDedupEnabled() {
+    return cacheLastDedupEnabled;
+  }
+
+  public void setCacheLastDedupEnabled(Boolean cacheLastDedupEnabled) {
+    this.cacheLastDedupEnabled = cacheLastDedupEnabled;
+  }
+
+  public Integer getCacheSize() {
+    return cacheSize;
+  }
+
+  public void setCacheSize(Integer cacheSize) {
+    this.cacheSize = cacheSize;
+  }
+
+  public Long getCacheDedupInterval() {
+    return cacheDedupInterval;
+  }
+
+  public void setCacheDedupInterval(Long cacheDedupInterval) {
+    this.cacheDedupInterval = cacheDedupInterval;
+  }
+
+  public Boolean isEnabled() {
+    return isEnabled;
+  }
+
+  public void setIsEnabled(Boolean isEnabled) {
+    this.isEnabled = isEnabled;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputFileBaseDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputFileBaseDescriptorImpl.java
new file mode 100644
index 0000000..51c7ec8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputFileBaseDescriptorImpl.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileBaseDescriptor;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
+public class InputFileBaseDescriptorImpl extends InputDescriptorImpl implements InputFileBaseDescriptor {
+  @Expose
+  @SerializedName("checkpoint_interval_ms")
+  private Integer checkpointIntervalMs;
+
+  @Expose
+  @SerializedName("process_file")
+  private Boolean processFile;
+
+  @Expose
+  @SerializedName("copy_file")
+  private Boolean copyFile;
+
+  @Override
+  public Boolean getProcessFile() {
+    return processFile;
+  }
+
+  public void setProcessFile(Boolean processFile) {
+    this.processFile = processFile;
+  }
+
+  @Override
+  public Boolean getCopyFile() {
+    return copyFile;
+  }
+
+  public void setCopyFile(Boolean copyFile) {
+    this.copyFile = copyFile;
+  }
+
+  @Override
+  public Integer getCheckpointIntervalMs() {
+    return checkpointIntervalMs;
+  }
+
+  public void setCheckpointIntervalMs(Integer checkpointIntervalMs) {
+    this.checkpointIntervalMs = checkpointIntervalMs;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputFileDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputFileDescriptorImpl.java
new file mode 100644
index 0000000..3bfd161
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputFileDescriptorImpl.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileDescriptor;
+
+public class InputFileDescriptorImpl extends InputFileBaseDescriptorImpl implements InputFileDescriptor {
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputS3FileDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputS3FileDescriptorImpl.java
new file mode 100644
index 0000000..277a57c
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/InputS3FileDescriptorImpl.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputS3FileDescriptor;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
+public class InputS3FileDescriptorImpl extends InputFileBaseDescriptorImpl implements InputS3FileDescriptor {
+  @Expose
+  @SerializedName("s3_access_key")
+  private String s3AccessKey;
+
+  @Expose
+  @SerializedName("s3_secret_key")
+  private String s3SecretKey;
+
+  @Override
+  public String getS3AccessKey() {
+    return s3AccessKey;
+  }
+
+  public void setS3AccessKey(String s3AccessKey) {
+    this.s3AccessKey = s3AccessKey;
+  }
+
+  @Override
+  public String getS3SecretKey() {
+    return s3SecretKey;
+  }
+
+  public void setS3SecretKey(String s3SecretKey) {
+    this.s3SecretKey = s3SecretKey;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapDateDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapDateDescriptorImpl.java
new file mode 100644
index 0000000..9daad2b
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapDateDescriptorImpl.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.MapDateDescriptor;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
+public class MapDateDescriptorImpl implements MapDateDescriptor {
+  @Override
+  public String getJsonName() {
+    return "map_date";
+  }
+
+  @Expose
+  @SerializedName("source_date_pattern")
+  private String sourceDatePattern;
+
+  @Expose
+  @SerializedName("target_date_pattern")
+  private String targetDatePattern;
+
+  @Override
+  public String getSourceDatePattern() {
+    return sourceDatePattern;
+  }
+
+  public void setSourceDatePattern(String sourceDatePattern) {
+    this.sourceDatePattern = sourceDatePattern;
+  }
+
+  @Override
+  public String getTargetDatePattern() {
+    return targetDatePattern;
+  }
+
+  public void setTargetDatePattern(String targetDatePattern) {
+    this.targetDatePattern = targetDatePattern;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapFieldCopyDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapFieldCopyDescriptorImpl.java
new file mode 100644
index 0000000..4a8d746
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapFieldCopyDescriptorImpl.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldCopyDescriptor;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
+public class MapFieldCopyDescriptorImpl implements MapFieldCopyDescriptor {
+  @Override
+  public String getJsonName() {
+    return "map_fieldcopy";
+  }
+
+  @Expose
+  @SerializedName("copy_name")
+  private String copyName;
+
+  @Override
+  public String getCopyName() {
+    return copyName;
+  }
+
+  public void setCopyName(String copyName) {
+    this.copyName = copyName;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapFieldNameDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapFieldNameDescriptorImpl.java
new file mode 100644
index 0000000..333cb67
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapFieldNameDescriptorImpl.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldNameDescriptor;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
+public class MapFieldNameDescriptorImpl implements MapFieldNameDescriptor {
+  @Override
+  public String getJsonName() {
+    return "map_fieldname";
+  }
+
+  @Expose
+  @SerializedName("new_fieldname")
+  private String newFieldName;
+
+  @Override
+  public String getNewFieldName() {
+    return newFieldName;
+  }
+
+  public void setNewFieldName(String newFieldName) {
+    this.newFieldName = newFieldName;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapFieldValueDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapFieldValueDescriptorImpl.java
new file mode 100644
index 0000000..599e152
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/MapFieldValueDescriptorImpl.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldValueDescriptor;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
+public class MapFieldValueDescriptorImpl implements MapFieldValueDescriptor {
+  @Override
+  public String getJsonName() {
+    return "map_fieldvalue";
+  }
+
+  @Expose
+  @SerializedName("pre_value")
+  private String preValue;
+
+  @Expose
+  @SerializedName("post_value")
+  private String postValue;
+
+  @Override
+  public String getPreValue() {
+    return preValue;
+  }
+
+  public void setPreValue(String preValue) {
+    this.preValue = preValue;
+  }
+
+  @Override
+  public String getPostValue() {
+    return postValue;
+  }
+
+  public void setPostValue(String postValue) {
+    this.postValue = postValue;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/PostMapValuesAdapter.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/PostMapValuesAdapter.java
new file mode 100644
index 0000000..32aded8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/PostMapValuesAdapter.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonSerializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonSerializationContext;
+
+public class PostMapValuesAdapter implements JsonDeserializer<List<PostMapValuesImpl>>, JsonSerializer<List<PostMapValuesImpl>> {
+  @Override
+  public List<PostMapValuesImpl> deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) {
+    List<PostMapValuesImpl> vals = new ArrayList<>();
+    if (json.isJsonArray()) {
+      for (JsonElement e : json.getAsJsonArray()) {
+        vals.add(createPostMapValues(e, context));
+      }
+    } else if (json.isJsonObject()) {
+      vals.add(createPostMapValues(json, context));
+    } else {
+      throw new RuntimeException("Unexpected JSON type: " + json.getClass());
+    }
+    return vals;
+  }
+
+  private PostMapValuesImpl createPostMapValues(JsonElement e, JsonDeserializationContext context) {
+    List<MapFieldDescriptor> mappers = new ArrayList<>();
+    for (Map.Entry<String, JsonElement> m : e.getAsJsonObject().entrySet()) {
+      switch (m.getKey()) {
+        case "map_date":
+          mappers.add((MapDateDescriptorImpl)context.deserialize(m.getValue(), MapDateDescriptorImpl.class));
+          break;
+        case "map_fieldcopy":
+          mappers.add((MapFieldCopyDescriptorImpl)context.deserialize(m.getValue(), MapFieldCopyDescriptorImpl.class));
+          break;
+        case "map_fieldname":
+          mappers.add((MapFieldNameDescriptorImpl)context.deserialize(m.getValue(), MapFieldNameDescriptorImpl.class));
+          break;
+        case "map_fieldvalue":
+          mappers.add((MapFieldValueDescriptorImpl)context.deserialize(m.getValue(), MapFieldValueDescriptorImpl.class));
+          break;
+        default:
+          System.out.println("Unknown key: " + m.getKey()); // FIXME: silently drops unknown mappers to stdout; should use a logger or fail fast
+      }
+    }
+    
+    PostMapValuesImpl postMapValues = new PostMapValuesImpl();
+    postMapValues.setMappers(mappers);
+    return postMapValues;
+  }
+
+  @Override
+  public JsonElement serialize(List<PostMapValuesImpl> src, Type typeOfSrc, JsonSerializationContext context) {
+    if (src.size() == 1) {
+      return createMapperObject(src.get(0), context);
+    } else {
+      JsonArray jsonArray = new JsonArray();
+      for (PostMapValuesImpl postMapValues : src) {
+        jsonArray.add(createMapperObject(postMapValues, context));
+      }
+      return jsonArray;
+    }
+  }
+
+  private JsonElement createMapperObject(PostMapValuesImpl postMapValues, JsonSerializationContext context) {
+    JsonObject jsonObject = new JsonObject();
+    for (MapFieldDescriptor m : postMapValues.getMappers()) {
+      jsonObject.add(m.getJsonName(), context.serialize(m)); // m is already a MapFieldDescriptor; redundant cast removed
+    }
+    return jsonObject;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/PostMapValuesImpl.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/PostMapValuesImpl.java
new file mode 100644
index 0000000..4d2254a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/model/inputconfig/impl/PostMapValuesImpl.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl;
+
+import java.util.List;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.PostMapValues;
+
+import com.google.gson.annotations.Expose;
+
+public class PostMapValuesImpl implements PostMapValues {
+  @Expose
+  private List<MapFieldDescriptor> mappers;
+
+  public List<MapFieldDescriptor> getMappers() {
+    return mappers;
+  }
+
+  public void setMappers(List<MapFieldDescriptor> mappers) {
+    this.mappers = mappers;
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-it/pom.xml b/ambari-logsearch/ambari-logsearch-it/pom.xml
index be7ab57..81af9e8 100644
--- a/ambari-logsearch/ambari-logsearch-it/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-it/pom.xml
@@ -33,8 +33,14 @@
   <properties>
     <it.skip>true</it.skip>
     <jbehave.version>4.0.5</jbehave.version>
+    <jbehave-selenium>3.5.5</jbehave-selenium>
     <jersey.version>2.23.1</jersey.version>
     <jackson-jaxrs.version>2.6.4</jackson-jaxrs.version>
+    <failsafe-plugin.version>2.20</failsafe-plugin.version>
+    <forkCount>1</forkCount>
+    <docker.host>localhost</docker.host>
+    <backend.stories.location>NONE</backend.stories.location>
+    <ui.stories.location>NONE</ui.stories.location>
   </properties>
 
   <dependencies>
@@ -44,6 +50,11 @@
       <version>${jbehave.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.jbehave.web</groupId>
+      <artifactId>jbehave-web-selenium</artifactId>
+      <version>${jbehave-selenium}</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.solr</groupId>
       <artifactId>solr-solrj</artifactId>
       <version>${solr.version}</version>
@@ -58,11 +69,6 @@
       <version>2.5</version>
     </dependency>
     <dependency>
-      <groupId>com.github.docker-java</groupId>
-      <artifactId>docker-java</artifactId>
-      <version>3.0.0</version>
-    </dependency>
-    <dependency>
       <groupId>com.fasterxml.jackson.jaxrs</groupId>
       <artifactId>jackson-jaxrs-json-provider</artifactId>
       <version>${jackson-jaxrs.version}</version>
@@ -86,6 +92,32 @@
       <groupId>com.flipkart.zjsonpatch</groupId>
       <artifactId>zjsonpatch</artifactId>
       <version>0.2.4</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-server</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-web</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-logfeeder</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>11.0.1</version>
     </dependency>
   </dependencies>
 
@@ -102,35 +134,139 @@
         <directory>src/test/resources</directory>
       </testResource>
     </testResources>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-failsafe-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>run-integration-tests</id>
-            <phase>integration-test</phase>
-            <goals>
-              <goal>integration-test</goal>
-            </goals>
-            <configuration>
-              <includes>
-                <include>**/*Stories.java</include>
-                <include>**/*Story.java</include>
-              </includes>
-              <skip>${it.skip}</skip>
-            </configuration>
-          </execution>
-          <execution>
-            <id>verify-integration-tests</id>
-            <phase>verify</phase>
-            <goals>
-              <goal>verify</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
   </build>
 
+  <profiles>
+    <profile>
+      <id>selenium-tests</id>
+      <activation>
+        <property>
+          <name>selenium-tests</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-failsafe-plugin</artifactId>
+            <version>${failsafe-plugin.version}</version>
+            <executions>
+              <execution>
+                <id>run-integration-tests</id>
+                <phase>integration-test</phase>
+                <goals>
+                  <goal>integration-test</goal>
+                </goals>
+                <configuration>
+                  <includes>
+                    <include>**/*UIStories.java</include>
+                  </includes>
+                  <systemPropertyVariables>
+                    <log4j.configuration>file:${project.build.testOutputDirectory}/log4j.properties</log4j.configuration>
+                    <docker.host>${docker.host}</docker.host>
+                    <ui.stories.location>${ui.stories.location}</ui.stories.location>
+                  </systemPropertyVariables>
+                </configuration>
+              </execution>
+              <execution>
+                <id>verify-integration-tests</id>
+                <phase>verify</phase>
+                <goals>
+                  <goal>verify</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+    <profile>
+      <id>backend-tests</id>
+      <activation>
+        <property>
+          <name>backend-tests</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-failsafe-plugin</artifactId>
+            <version>${failsafe-plugin.version}</version>
+            <executions>
+              <execution>
+                <id>run-integration-tests</id>
+                <phase>integration-test</phase>
+                <goals>
+                  <goal>integration-test</goal>
+                </goals>
+                <configuration>
+                  <includes>
+                    <include>**/*BackendStories.java</include>
+                  </includes>
+                  <systemPropertyVariables>
+                    <log4j.configuration>file:${project.build.testOutputDirectory}/log4j.properties</log4j.configuration>
+                    <docker.host>${docker.host}</docker.host>
+                    <backend.stories.location>${backend.stories.location}</backend.stories.location>
+                  </systemPropertyVariables>
+                </configuration>
+              </execution>
+              <execution>
+                <id>verify-integration-tests</id>
+                <phase>verify</phase>
+                <goals>
+                  <goal>verify</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+    <profile>
+      <id>all-tests</id>
+      <activation>
+        <property>
+          <name>all-tests</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-failsafe-plugin</artifactId>
+            <version>${failsafe-plugin.version}</version>
+            <executions>
+              <execution>
+                <id>run-integration-tests</id>
+                <phase>integration-test</phase>
+                <goals>
+                  <goal>integration-test</goal>
+                </goals>
+                <configuration>
+                  <includes>
+                    <include>**/*Stories.java</include>
+                  </includes>
+                  <systemPropertyVariables>
+                    <log4j.configuration>file:${project.build.testOutputDirectory}/log4j.properties</log4j.configuration>
+                    <docker.host>${docker.host}</docker.host>
+                    <backend.stories.location>${backend.stories.location}</backend.stories.location>
+                    <ui.stories.location>${ui.stories.location}</ui.stories.location>
+                  </systemPropertyVariables>
+                </configuration>
+              </execution>
+              <execution>
+                <id>verify-integration-tests</id>
+                <phase>verify</phase>
+                <goals>
+                  <goal>verify</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
 </project>
\ No newline at end of file
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/domain/StoryDataRegistry.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/domain/StoryDataRegistry.java
index 564972a..41d6391 100644
--- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/domain/StoryDataRegistry.java
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/domain/StoryDataRegistry.java
@@ -18,24 +18,23 @@
  */
 package org.apache.ambari.logsearch.domain;
 
-import com.github.dockerjava.api.DockerClient;
-import com.github.dockerjava.core.DockerClientConfig;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.SolrClient;
+import org.jbehave.web.selenium.WebDriverProvider;
 
 public class StoryDataRegistry {
   public static final StoryDataRegistry INSTANCE = new StoryDataRegistry();
 
-  private DockerClient dockerClient;
-  private DockerClientConfig dockerClientConfig;
-  private CloudSolrClient cloudSolrClient;
+  private SolrClient solrClient;
   private boolean logsearchContainerStarted = false;
   private String dockerHost;
   private String ambariFolder;
+  private String shellScriptLocation;
   private final int solrPort = 8886;
   private final int logsearchPort = 61888;
   private final int zookeeperPort = 9983;
   private final String serviceLogsCollection = "hadoop_logs";
   private final String auditLogsCollection = "audit_logs";
+  private WebDriverProvider webDriverProvider;
 
   private StoryDataRegistry() {
   }
@@ -60,14 +59,6 @@
     return zookeeperPort;
   }
 
-  public DockerClient getDockerClient() {
-    return dockerClient;
-  }
-
-  public void setDockerClient(DockerClient dockerClient) {
-    this.dockerClient = dockerClient;
-  }
-
   public String getServiceLogsCollection() {
     return serviceLogsCollection;
   }
@@ -76,12 +67,12 @@
     return auditLogsCollection;
   }
 
-  public CloudSolrClient getCloudSolrClient() {
-    return cloudSolrClient;
+  public SolrClient getSolrClient() {
+    return solrClient;
   }
 
-  public void setCloudSolrClient(CloudSolrClient cloudSolrClient) {
-    this.cloudSolrClient = cloudSolrClient;
+  public void setSolrClient(SolrClient solrClient) {
+    this.solrClient = solrClient;
   }
 
   public String getAmbariFolder() {
@@ -92,12 +83,12 @@
     this.ambariFolder = ambariFolder;
   }
 
-  public DockerClientConfig getDockerClientConfig() {
-    return dockerClientConfig;
+  public String getShellScriptLocation() {
+    return shellScriptLocation;
   }
 
-  public void setDockerClientConfig(DockerClientConfig dockerClientConfig) {
-    this.dockerClientConfig = dockerClientConfig;
+  public void setShellScriptLocation(String shellScriptLocation) {
+    this.shellScriptLocation = shellScriptLocation;
   }
 
   public boolean isLogsearchContainerStarted() {
@@ -107,4 +98,12 @@
   public void setLogsearchContainerStarted(boolean logsearchContainerStarted) {
     this.logsearchContainerStarted = logsearchContainerStarted;
   }
+
+  public WebDriverProvider getWebDriverProvider() {
+    return webDriverProvider;
+  }
+
+  public void setWebDriverProvider(WebDriverProvider webDriverProvider) {
+    this.webDriverProvider = webDriverProvider;
+  }
 }
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/AbstractLogSearchSteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/AbstractLogSearchSteps.java
new file mode 100644
index 0000000..a0027ae
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/AbstractLogSearchSteps.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.steps;
+
+import org.apache.ambari.logsearch.domain.StoryDataRegistry;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.client.solrj.response.SolrPingResponse;
+import org.apache.solr.common.SolrDocumentList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.net.URL;
+
+public class AbstractLogSearchSteps {
+
+  private static final Logger LOG = LoggerFactory.getLogger(AbstractLogSearchSteps.class);
+
+  protected void initDockerContainer() throws Exception{
+    boolean logsearchStarted = StoryDataRegistry.INSTANCE.isLogsearchContainerStarted();
+    if (!logsearchStarted) {
+      LOG.info("Create new docker container for Log Search ...");
+      URL location = LogSearchDockerSteps.class.getProtectionDomain().getCodeSource().getLocation();
+      String ambariFolder = new File(location.toURI()).getParentFile().getParentFile().getParentFile().getParent();
+      StoryDataRegistry.INSTANCE.setAmbariFolder(ambariFolder);
+      String shellScriptLocation = ambariFolder + "/ambari-logsearch/docker/logsearch-docker.sh";
+      StoryDataRegistry.INSTANCE.setShellScriptLocation(shellScriptLocation);
+      String output = runCommand(new String[]{StoryDataRegistry.INSTANCE.getShellScriptLocation(), "start"});
+      LOG.info("Command output: {}", output);
+      StoryDataRegistry.INSTANCE.setLogsearchContainerStarted(true);
+
+      String dockerHostFromUri = System.getProperty("docker.host") != null ? System.getProperty("docker.host") : "localhost";;
+
+      StoryDataRegistry.INSTANCE.setDockerHost(dockerHostFromUri);
+      checkHostAndPortReachable(dockerHostFromUri, StoryDataRegistry.INSTANCE.getLogsearchPort(), "LogSearch");
+      waitUntilSolrIsUp();
+      waitUntilSolrHasAnyData();
+
+      LOG.info("Waiting for logfeeder to finish the test log parsings... (10 sec)");
+      Thread.sleep(10000);
+    }
+  }
+
+  private void waitUntilSolrIsUp() throws Exception {
+    int maxTries = 30;
+    boolean solrIsUp = false;
+    String lastExceptionMessage = null;
+    for (int tries = 1; tries < maxTries; tries++) {
+      try {
+        SolrClient solrClient = new LBHttpSolrClient(String.format("http://%s:%d/solr/%s_shard0_replica1",
+          StoryDataRegistry.INSTANCE.getDockerHost(),
+          StoryDataRegistry.INSTANCE.getSolrPort(),
+          StoryDataRegistry.INSTANCE.getServiceLogsCollection()));
+        StoryDataRegistry.INSTANCE.setSolrClient(solrClient);
+        SolrPingResponse pingResponse = solrClient.ping();
+        if (pingResponse.getStatus() != 0) {
+          LOG.info("Solr is not up yet, Retrying... ({} tries)", tries);
+          Thread.sleep(2000);
+        } else {
+          solrIsUp = true;
+          LOG.info("Solr is up and running");
+          break;
+        }
+      } catch (Exception e) {
+        LOG.info("Error occurred during pinging solr. Retrying... ({} tries)", tries);
+        lastExceptionMessage = e.getMessage();
+        Thread.sleep(2000);
+      }
+    }
+
+    if (!solrIsUp) {
+      throw new IllegalStateException(String.format("Solr is not up after %d tries. Exception: %s", maxTries, lastExceptionMessage));
+    }
+  }
+
+  protected void waitUntilSolrHasAnyData() throws IOException, SolrServerException, InterruptedException {
+    boolean solrHasData = false;
+    int maxTries = 60;
+    String lastExceptionMessage = null;
+    for (int tries = 1; tries < maxTries; tries++) {
+      try {
+        SolrClient solrClient = StoryDataRegistry.INSTANCE.getSolrClient();
+        SolrQuery solrQuery = new SolrQuery();
+        solrQuery.setQuery("*:*");
+        QueryResponse queryResponse = solrClient.query(solrQuery);
+        SolrDocumentList list = queryResponse.getResults();
+        if (list.size() > 0) {
+          solrHasData = true;
+          break;
+        } else {
+          Thread.sleep(2000);
+          LOG.info("Solr has no data yet. Retrying... ({} tries)", tries);
+        }
+      } catch (Exception e) {
+        LOG.info("Error occurred during checking solr. Retrying... ({} tries)", tries);
+        lastExceptionMessage = e.getMessage();
+        Thread.sleep(2000);
+      }
+    }
+    if (!solrHasData) {
+      throw new IllegalStateException(String.format("Solr has no data after %d tries. Exception: %s", maxTries, lastExceptionMessage));
+    }
+  }
+
+
+  protected void checkHostAndPortReachable(String host, int port, String serviceName) throws InterruptedException {
+    boolean reachable = false;
+    int maxTries = 60;
+    for (int tries = 1; tries < maxTries; tries++ ) {
+      try (Socket socket = new Socket()) {
+        socket.connect(new InetSocketAddress(host, port), 1000);
+        reachable = true;
+        break;
+      } catch (IOException e) {
+        Thread.sleep(2000);
+        LOG.info("{} is not reachable yet. Retrying... ({} tries)", serviceName, tries);
+      }
+    }
+    if (!reachable) {
+      throw new IllegalStateException(String.format("%s is not reachable after %s tries", serviceName, maxTries));
+    }
+  }
+
+
+  protected String runCommand(String[] command) {
+    try {
+      LOG.info("Exec command: {}", StringUtils.join(command, " "));
+      Process process = Runtime.getRuntime().exec(command);
+      BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
+      return reader.readLine();
+    } catch (Exception e) {
+      throw new RuntimeException("Error during execute shell command: ", e);
+    }
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchDockerSteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchDockerSteps.java
index 5f8f9bf..cb67fcc 100644
--- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchDockerSteps.java
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchDockerSteps.java
@@ -18,23 +18,13 @@
  */
 package org.apache.ambari.logsearch.steps;
 
-import com.github.dockerjava.api.DockerClient;
-import com.github.dockerjava.api.command.CreateContainerResponse;
-import com.github.dockerjava.api.model.Bind;
-import com.github.dockerjava.api.model.Container;
-import com.github.dockerjava.api.model.ExposedPort;
-import com.github.dockerjava.api.model.Ports;
-import com.github.dockerjava.api.model.Volume;
-import com.github.dockerjava.core.DockerClientBuilder;
-import com.github.dockerjava.core.DockerClientConfig;
-import com.github.dockerjava.core.command.BuildImageResultCallback;
-import com.google.common.base.Preconditions;
 import org.apache.ambari.logsearch.domain.StoryDataRegistry;
-import org.apache.commons.lang.ArrayUtils;
+import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
 import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.client.solrj.response.SolrPingResponse;
 import org.apache.solr.common.SolrDocumentList;
 import org.jbehave.core.annotations.AfterStories;
 import org.jbehave.core.annotations.BeforeStories;
@@ -43,202 +33,35 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.net.URL;
-import java.util.List;
 
-public class LogSearchDockerSteps {
+public class LogSearchDockerSteps extends AbstractLogSearchSteps {
 
   private static final Logger LOG = LoggerFactory.getLogger(LogSearchDockerSteps.class);
 
   @Given("logsearch docker container")
   public void setupLogSearchContainer() throws Exception {
-    boolean logsearchStarted = StoryDataRegistry.INSTANCE.isLogsearchContainerStarted();
-    if (!logsearchStarted) {
-      DockerClient dockerClient = StoryDataRegistry.INSTANCE.getDockerClient();
-      LOG.info("Create new docker container for Log Search ..");
-      URL location = LogSearchDockerSteps.class.getProtectionDomain().getCodeSource().getLocation();
-      String ambariFolder = new File(location.toURI()).getParentFile().getParentFile().getParentFile().getParent();
-      StoryDataRegistry.INSTANCE.setAmbariFolder(ambariFolder);
-      String dockerBaseDirectory = ambariFolder + "/ambari-logsearch/docker";
-      String dockerFileLocation = dockerBaseDirectory + "/Dockerfile";
-
-      String imageId = dockerClient.buildImageCmd()
-        .withTag("ambari-logsearch:v1.0")
-        .withBaseDirectory(new File(dockerBaseDirectory))
-        .withDockerfile(new File(dockerFileLocation))
-        .exec(new BuildImageResultCallback())
-        .awaitImageId();
-      LOG.info("Docker image id: {}", imageId);
-
-      removeLogSearchContainerIfExists();
-
-      // volume bindings
-      Volume testLogsVolume = new Volume("/root/test-logs");
-      Volume testConfigVolume = new Volume("/root/test-config");
-      Volume ambariVolume = new Volume("/root/ambari");
-      Volume logfeederClassesVolume = new Volume("/root/ambari/ambari-logsearch/ambari-logsearch-logfeeder/target/package/classes");
-      Volume logsearchClassesVolume = new Volume("/root/ambari/ambari-logsearch/ambari-logsearch-portal/target/package/classes");
-      Volume logsearchWebappVolume = new Volume("/root/ambari/ambari-logsearch/ambari-logsearch-portal/target/package/classes/webapps/app");
-      Bind testLogsBind = new Bind(ambariFolder +"/ambari-logsearch/docker/test-logs", testLogsVolume);
-      Bind testConfigBind = new Bind(ambariFolder +"/ambari-logsearch/docker/test-config", testConfigVolume);
-      Bind ambariRootBind = new Bind(ambariFolder, ambariVolume);
-      Bind logfeederClassesBind = new Bind(ambariFolder + "/ambari-logsearch/ambari-logsearch-logfeeder/target/classes", logfeederClassesVolume);
-      Bind logsearchClassesBind = new Bind(ambariFolder + "/ambari-logsearch/ambari-logsearch-portal/target/classes", logsearchClassesVolume);
-      Bind logsearchWebappBind = new Bind(ambariFolder + "/ambari-logsearch/ambari-logsearch-portal/src/main/webapp", logsearchWebappVolume);
-
-      // port bindings
-      Ports ports = new Ports();
-      ports.bind(new ExposedPort(5005), new Ports.Binding("0.0.0.0", "5005"));
-      ports.bind(new ExposedPort(5006), new Ports.Binding("0.0.0.0", "5006"));
-      ports.bind(new ExposedPort(StoryDataRegistry.INSTANCE.getSolrPort()), new Ports.Binding("0.0.0.0", "8886"));
-      ports.bind(new ExposedPort(StoryDataRegistry.INSTANCE.getLogsearchPort()), new Ports.Binding("0.0.0.0", "61888"));
-      ports.bind(new ExposedPort(StoryDataRegistry.INSTANCE.getZookeeperPort()), new Ports.Binding("0.0.0.0", "9983"));
-
-      LOG.info("Creating docker cointainer...");
-      CreateContainerResponse createResponse = dockerClient.createContainerCmd("ambari-logsearch:v1.0")
-        .withHostName("logsearch.apache.org")
-        .withName("logsearch")
-        .withVolumes(testLogsVolume, testConfigVolume, ambariVolume, logfeederClassesVolume, logsearchClassesVolume, logsearchWebappVolume)
-        .withBinds(testLogsBind, testConfigBind, ambariRootBind, logfeederClassesBind, logsearchClassesBind, logsearchWebappBind)
-        .withExposedPorts(
-          new ExposedPort(StoryDataRegistry.INSTANCE.getLogsearchPort()),
-          new ExposedPort(5005),
-          new ExposedPort(5006),
-          new ExposedPort(StoryDataRegistry.INSTANCE.getSolrPort()),
-          new ExposedPort(StoryDataRegistry.INSTANCE.getZookeeperPort()))
-        .withPortBindings(ports)
-        .exec();
-      LOG.info("Created docker container id: {}", createResponse.getId());
-
-      dockerClient.startContainerCmd(createResponse.getId()).exec();
-      StoryDataRegistry.INSTANCE.setLogsearchContainerStarted(true);
-      String dockerHostFromUri = StoryDataRegistry.INSTANCE.getDockerClientConfig().getDockerHost().getHost();
-      StoryDataRegistry.INSTANCE.setDockerHost(dockerHostFromUri);
-      checkHostAndPortReachable(dockerHostFromUri, StoryDataRegistry.INSTANCE.getLogsearchPort(), "LogSearch");
-      waitUntilSolrHasAnyData();
-
-      LOG.info("Waiting for logfeeder to finish the test log parsings... (10 sec)");
-      Thread.sleep(10000);
-    }
+    initDockerContainer();
   }
 
   @When("logfeeder started (parse logs & send data to solr)")
   public void logfeederStarted() throws Exception {
     // TODO: run ps aux to check LogFeeder process with docker exec
-    /**
-    DockerClient dockerClient = StoryDataRegistry.INSTANCE.getDockerClient();
-    ExecCreateCmdResponse execResp = dockerClient
-      .execCreateCmd(containerId)
-      .withAttachStdout(true)
-      .withCmd("ps", "aux").exec();
-    execResp.getId();
-    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
-    ExecStartResultCallback res = dockerClient
-      .execStartCmd(execResp.getId())
-      .withDetach(true)
-      .withTty(true)
-      .exec(new ExecStartResultCallback(outputStream,  outputStream)).awaitCompletion();
-     **/
   }
 
   @BeforeStories
-  public void checkDockerApi() {
-    LOG.info("Tries to setup docker client configuration");
-    final String dockerHost = System.getenv("DOCKER_HOST");
-    final String dockerCertPath = System.getenv("DOCKER_CERT_PATH");
-    final String dockerApiVersion = System.getenv("DOCKER_API_VERSION") == null ? "1.20" : System.getenv("DOCKER_API_VERSION");
-
-    Preconditions.checkArgument(dockerHost != null, "Set 'DOCKER_HOST' env variable");
-    Preconditions.checkArgument(dockerCertPath != null, "Set 'DOCKER_CERT_PATH' env variable");
-    LOG.info("DOCKER_HOST: {}", dockerHost);
-    LOG.info("DOCKER_CERT_PATH: {}", dockerCertPath);
-    LOG.info("DOCKER_API_VERSION: {}", dockerApiVersion);
-    DockerClientConfig dockerClientConfig = DockerClientConfig.createDefaultConfigBuilder()
-      .withDockerHost(dockerHost)
-      .withDockerCertPath(dockerCertPath)
-      .withApiVersion(dockerApiVersion)
-      .withDockerTlsVerify(true)
-      .build();
-    StoryDataRegistry.INSTANCE.setDockerClientConfig(dockerClientConfig);
-    DockerClient dockerClient = DockerClientBuilder.getInstance(dockerClientConfig).build();
-    StoryDataRegistry.INSTANCE.setDockerClient(dockerClient);
-    LOG.info("Docker client setup successfully.");
+  public void initDocker() throws Exception {
+    // TODO: check docker is up
   }
 
   @AfterStories
   public void removeLogSearchContainer() {
-    removeLogSearchContainerIfExists();
-  }
-
-  private void removeLogSearchContainerIfExists() {
-    DockerClient dockerClient = StoryDataRegistry.INSTANCE.getDockerClient();
-    List<Container> containerList = dockerClient
-      .listContainersCmd()
-      .withShowAll(true)
-      .exec();
-
-    boolean isLogSearchContainerExists = false;
-    String containerId = null;
-    for (Container container : containerList) {
-      isLogSearchContainerExists = ArrayUtils.contains(container.getNames(), "/logsearch");
-      if (isLogSearchContainerExists) {
-        containerId = container.getId();
-        break;
-      }
-    }
-
-    if (isLogSearchContainerExists) {
-      LOG.info("Remove logsearch container: {}", containerId);
-      dockerClient.removeContainerCmd(containerId).withForce(true).exec();
-    }
-  }
-
-  private void waitUntilSolrHasAnyData() throws IOException, SolrServerException, InterruptedException {
-    boolean solrHasData = false;
-    CloudSolrClient solrClient = new CloudSolrClient(String.format("%s:%d",
-      StoryDataRegistry.INSTANCE.getDockerHost(),
-      StoryDataRegistry.INSTANCE.getZookeeperPort()));
-    StoryDataRegistry.INSTANCE.setCloudSolrClient(solrClient);
-    SolrQuery solrQuery = new SolrQuery();
-    solrQuery.setQuery("*:*");
-
-    int maxTries = 60;
-    for (int tries = 1; tries < maxTries; tries++) {
-      QueryResponse queryResponse = solrClient.query(StoryDataRegistry.INSTANCE.getServiceLogsCollection(), solrQuery);
-      SolrDocumentList list = queryResponse.getResults();
-      if (list.size() > 0) {
-        solrHasData = true;
-        break;
-      } else {
-        Thread.sleep(2000);
-        LOG.info("Solr has no data yet, retrying...");
-      }
-    }
-    if (!solrHasData) {
-      throw new IllegalStateException(String.format("Solr has no data after %d tries", maxTries));
-    }
-  }
-
-
-  private void checkHostAndPortReachable(String host, int port, String serviceName) throws InterruptedException {
-    boolean reachable = false;
-    int maxTries = 60;
-    for (int tries = 1; tries < maxTries; tries++ ) {
-      try (Socket socket = new Socket()) {
-        socket.connect(new InetSocketAddress(host, port), 1000);
-        reachable = true;
-        break;
-      } catch (IOException e) {
-        Thread.sleep(2000);
-        LOG.info("{} is not reachable yet, retrying..", serviceName);
-      }
-    }
-    if (!reachable) {
-      throw new IllegalStateException(String.format("%s is not reachable after %s tries", serviceName, maxTries));
-    }
+    runCommand(new String[]{StoryDataRegistry.INSTANCE.getShellScriptLocation(), "stop"});
   }
 }
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java
new file mode 100644
index 0000000..b40a2bc
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java
@@ -0,0 +1,212 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.steps;
+
+import junit.framework.Assert;
+import org.apache.ambari.logsearch.domain.StoryDataRegistry;
+import org.apache.ambari.logsearch.web.Home;
+import org.jbehave.core.annotations.AfterScenario;
+import org.jbehave.core.annotations.AfterStories;
+import org.jbehave.core.annotations.AfterStory;
+import org.jbehave.core.annotations.BeforeScenario;
+import org.jbehave.core.annotations.BeforeStories;
+import org.jbehave.core.annotations.BeforeStory;
+import org.jbehave.core.annotations.Given;
+import org.jbehave.core.annotations.Named;
+import org.jbehave.core.annotations.Then;
+import org.jbehave.core.annotations.When;
+import org.jbehave.web.selenium.WebDriverProvider;
+import org.openqa.selenium.By;
+import org.openqa.selenium.NoSuchElementException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.TimeUnit;
+
+public class LogSearchUISteps extends AbstractLogSearchSteps {
+
+  private static final Logger LOG = LoggerFactory.getLogger(LogSearchUISteps.class);
+
+  private final WebDriverProvider driverProvider;
+
+  private Home home;
+
+  public LogSearchUISteps(WebDriverProvider driverProvider) {
+    this.driverProvider = driverProvider;
+  }
+
+  @BeforeScenario
+  public void initHomePage() {
+    home = new Home(driverProvider);
+    LOG.info("Init home page: {}", home.getCurrentUrl());
+  }
+
+  @AfterScenario
+  public void deleteCookies() {
+    LOG.info("Delete all cookies...");
+    home.manage().deleteAllCookies();
+  }
+
+  @BeforeStories
+  public void beforeStories() throws Exception {
+    initDockerContainer();
+    LOG.info("Initialize web driver...");
+    StoryDataRegistry.INSTANCE.getWebDriverProvider().initialize();
+    LOG.info("Web driver details: {}",  StoryDataRegistry.INSTANCE.getWebDriverProvider().get().toString());
+  }
+
+  @AfterStory
+  public void closePage() throws Exception {
+    LOG.info("Closing web driver");
+    StoryDataRegistry.INSTANCE.getWebDriverProvider().end();
+  }
+
+  @Given("open logsearch home page")
+  public void initBrowser() {
+    LOG.info("Delete all cookies...");
+    home.manage().deleteAllCookies();
+    LOG.info("Open home page: {}", home.getCurrentUrl());
+    home.open();
+  }
+
+  @When("login with $username / $password")
+  public void login(@Named("username") String userName, @Named("password") String password) {
+    LOG.info("Type username: {}", userName);
+    home.findElement(By.id("username")).sendKeys(userName);
+    LOG.info("Type password: {}", password);
+    home.findElement(By.id("password")).sendKeys(password);
+    LOG.info("Click on Sign In button.");
+    home.findElement(By.className("custLogin")).click();
+    closeTourPopup();
+  }
+
+  @Then("page contains text: '$text'")
+  public void contains(@Named("text") String text) {
+    LOG.info("Check page contains text: '{}'", text);
+    home.found(text);
+  }
+
+  @Then("page does not contain text: '$text'")
+  public void notContains(@Named("text") String text) {
+    LOG.info("Check page does not contain text: '{}'", text);
+    home.notFound(text);
+  }
+
+  @When("wait $seconds seconds")
+  public void waitSeconds(@Named("seconds") String second) {
+    LOG.info("Wait {} seconds...", second);
+    home.manage().timeouts().implicitlyWait(Integer.parseInt(second), TimeUnit.SECONDS);
+  }
+
+  @When("click on element: $xpath (xpath)")
+  public void clickOnElementByXPath(@Named("xpath") String xPath) {
+    LOG.info("Click on element by xpath: '{}'", xPath);
+    driverProvider.get().findElement(By.xpath(xPath)).click();
+  }
+
+  @When("click on element: $id (id)")
+  public void clickOnElementById(@Named("id") String id) {
+    LOG.info("Click on element by id: '{}'", id);
+    driverProvider.get().findElement(By.id(id)).click();
+  }
+
+  @When("click on element: $css (css selector)")
+  public void clickOnElementByCssSelector(@Named("css") String cssSelector) {
+    LOG.info("Click on element by css selector: '{}'", cssSelector);
+    driverProvider.get().findElement(By.cssSelector(cssSelector)).click();
+  }
+
+  @Then("element exists with xpath: $xpath")
+  public void findByXPath(@Named("xpath") String xPath) {
+    LOG.info("Find element by xpath: '{}'", xPath);
+    Assert.assertNotNull(home.findElement(By.xpath(xPath)));
+  }
+
+  @Then("element exists with id: $id")
+  public void findById(@Named("id") String id) {
+    LOG.info("Find element by id: '{}'", id);
+    Assert.assertNotNull(home.findElement(By.id(id)));
+  }
+
+  @Then("element exists with css selector: $css")
+  public void findByCssSelector(@Named("css") String cssSelector) {
+    LOG.info("Find element by css selector: '{}'", cssSelector);
+    Assert.assertNotNull(home.findElement(By.cssSelector(cssSelector)));
+  }
+
+  @Then("element text equals '$text', with xpath $xpath")
+  public void equalsByXPath(@Named("text") String text, @Named("xpath") String xPath) {
+    LOG.info("Check text of the element (xpath: '{}') equals with '{}'", xPath, text);
+    Assert.assertEquals(text, home.findElement(By.xpath(xPath)).getText());
+  }
+
+  @Then("element text equals '$text' with id $id")
+  public void equalsById(@Named("text") String text, @Named("id") String id) {
+    LOG.info("Check text of the element (id: '{}') equals with '{}'", id, text);
+    Assert.assertEquals(text, home.findElement(By.id(id)).getText());
+  }
+
+  @Then("element text equals '$text' with css selector $css")
+  public void equalsCssSelector(@Named("text") String text, @Named("css") String cssSelector) {
+    LOG.info("Check text of the element (css selector: '{}') equals with '{}'", cssSelector, text);
+    Assert.assertEquals(text, home.findElement(By.cssSelector(cssSelector)).getText());
+  }
+
+  @Then("element does not exist with xpath: $xpath")
+  public void doNotFindByXPath(@Named("xpath") String xPath) {
+    try {
+      LOG.info("Check that element does not exist with xpath: {}", xPath);
+      home.findElement(By.xpath(xPath));
+      Assert.fail(String.format("Element is found. xPath: '%s'", xPath));
+    } catch (NoSuchElementException e) {
+      // success
+    }
+  }
+
+  @Then("element does not exist with id: $id")
+  public void doNotFindById(@Named("id") String id) {
+    try {
+      LOG.info("Check that element does not exist with id: {}", id);
+      home.findElement(By.id(id));
+      Assert.fail(String.format("Element is found. id: '%s'", id));
+    } catch (NoSuchElementException e) {
+      // success
+    }
+  }
+
+  @Then("element does not exist with css selector: $css")
+  public void doNotFindByCssSelector(@Named("css") String cssSelector) {
+    try {
+      LOG.info("Check that element does not exist with css selector: {}", cssSelector);
+      home.findElement(By.cssSelector(cssSelector));
+      Assert.fail(String.format("Element is found. css selector: '%s'", cssSelector));
+    } catch (NoSuchElementException e) {
+      // success
+    }
+  }
+
+  private void closeTourPopup() {
+    LOG.info("Close Tour popup if needed.");
+    try {
+      home.findElement(By.cssSelector("div.modal-footer > button.btn.btn-default")).click();
+    } catch (NoSuchElementException ex) {
+      // do nothing - no popup
+    }
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java
index 7c72ca7..4420540 100644
--- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java
@@ -35,12 +35,12 @@
   @Then("the number of <component> docs is: <docSize>")
   public void numberOfDocsForComponent(@Named("component") String component, @Named("docSize") int docSize)
     throws IOException, SolrServerException, InterruptedException {
-    SolrClient solrClient = StoryDataRegistry.INSTANCE.getCloudSolrClient();
+    SolrClient solrClient = StoryDataRegistry.INSTANCE.getSolrClient();
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setQuery(String.format("type:%s", component));
     solrQuery.setStart(0);
     solrQuery.setRows(20);
-    QueryResponse queryResponse = solrClient.query(StoryDataRegistry.INSTANCE.getServiceLogsCollection(), solrQuery);
+    QueryResponse queryResponse = solrClient.query(solrQuery);
     SolrDocumentList list = queryResponse.getResults();
     Assert.assertEquals(docSize, list.size());
   }
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchApiQueryStory.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchApiQueryStory.java
deleted file mode 100644
index 45455bf..0000000
--- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchApiQueryStory.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.story;
-
-public class LogSearchApiQueryStory extends LogSearchStory {
-}
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchBackendStories.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchBackendStories.java
new file mode 100644
index 0000000..fa7a527
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchBackendStories.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.story;
+
+import com.google.common.base.Predicate;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.Lists;
+import org.apache.ambari.logsearch.steps.LogSearchApiSteps;
+import org.apache.ambari.logsearch.steps.SolrSteps;
+import org.apache.ambari.logsearch.steps.LogSearchDockerSteps;
+import org.jbehave.core.configuration.Configuration;
+import org.jbehave.core.configuration.MostUsefulConfiguration;
+import org.jbehave.core.junit.JUnitStories;
+import org.jbehave.core.reporters.Format;
+import org.jbehave.core.reporters.StoryReporterBuilder;
+import org.jbehave.core.steps.InjectableStepsFactory;
+import org.jbehave.core.steps.InstanceStepsFactory;
+import org.junit.Test;
+
+import java.util.List;
+
+public class LogSearchBackendStories extends JUnitStories {
+
+  private static final String BACKEND_STORIES_LOCATION_PROPERTY = "backend.stories.location";
+  private static final String STORY_SUFFIX = ".story";
+
+  @Override
+  public Configuration configuration() {
+    return new MostUsefulConfiguration()
+      .useStoryLoader(LogSearchStoryLocator.getStoryLoader(BACKEND_STORIES_LOCATION_PROPERTY, this.getClass()))
+      .useStoryReporterBuilder(
+        new StoryReporterBuilder().withFailureTrace(true).withDefaultFormats().withFormats(Format.CONSOLE, Format.TXT));
+  }
+
+  @Override
+  public InjectableStepsFactory stepsFactory() {
+    return new InstanceStepsFactory(configuration(),
+      new LogSearchDockerSteps(),
+      new SolrSteps(),
+      new LogSearchApiSteps());
+  }
+
+  @Test
+  public void run() throws Throwable {
+    super.run();
+  }
+
+  @Override
+  protected List<String> storyPaths() {
+    List<String> backendStories = LogSearchStoryLocator.findStories(BACKEND_STORIES_LOCATION_PROPERTY, STORY_SUFFIX, this.getClass());
+    return Lists.newArrayList(Collections2.filter(backendStories, new Predicate<String>() {
+      @Override
+      public boolean apply(String storyFileName) {
+        return !storyFileName.endsWith("ui.story");
+      }
+    }));
+  }
+
+}
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStory.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStory.java
deleted file mode 100644
index ce6b9cb..0000000
--- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStory.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.story;
-
-import org.apache.ambari.logsearch.steps.LogSearchApiSteps;
-import org.apache.ambari.logsearch.steps.SolrSteps;
-import org.apache.ambari.logsearch.steps.LogSearchDockerSteps;
-import org.jbehave.core.configuration.Configuration;
-import org.jbehave.core.configuration.MostUsefulConfiguration;
-import org.jbehave.core.io.LoadFromClasspath;
-import org.jbehave.core.io.StoryPathResolver;
-import org.jbehave.core.io.UnderscoredCamelCaseResolver;
-import org.jbehave.core.junit.JUnitStory;
-import org.jbehave.core.reporters.Format;
-import org.jbehave.core.reporters.StoryReporterBuilder;
-import org.jbehave.core.steps.InjectableStepsFactory;
-import org.jbehave.core.steps.InstanceStepsFactory;
-import org.junit.Test;
-
-abstract public class LogSearchStory extends JUnitStory {
-  @Override
-  public Configuration configuration() {
-    StoryPathResolver storyPathResolver = new UnderscoredCamelCaseResolver(".story");
-    return new MostUsefulConfiguration()
-      .useStoryPathResolver(storyPathResolver)
-      .useStoryLoader(new LoadFromClasspath(this.getClass()))
-      .useStoryReporterBuilder(
-        new StoryReporterBuilder().withFailureTrace(true).withDefaultFormats().withFormats(Format.CONSOLE, Format.TXT));
-  }
-
-  @Override
-  public InjectableStepsFactory stepsFactory() {
-    return new InstanceStepsFactory(configuration(),
-      new LogSearchDockerSteps(),
-      new SolrSteps(),
-      new LogSearchApiSteps());
-  }
-
-  @Test
-  public void run() throws Throwable {
-    super.run();
-  }
-
-}
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStoryLocator.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStoryLocator.java
new file mode 100644
index 0000000..bed7999
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStoryLocator.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.story;
+
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.StringUtils;
+import org.jbehave.core.io.LoadFromClasspath;
+import org.jbehave.core.io.LoadFromRelativeFile;
+import org.jbehave.core.io.StoryFinder;
+import org.jbehave.core.io.StoryLoader;
+
+import java.io.File;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.jbehave.core.io.CodeLocations.codeLocationFromClass;
+
+/**
+ * Helper class for loading story files from the classpath or externally - based on system properties
+ */
+public class LogSearchStoryLocator {
+
+  private LogSearchStoryLocator() {
+  }
+
+  /**
+   * Get the proper story loader based on story location property (if empty or NONE - use story loading from classpath)
+   * @param property Story location property (absolute path - folder)
+   * @param clazz Class of the *Stories object
+   */
+  public static StoryLoader getStoryLoader(String property, Class clazz) {
+    boolean useExternalStoryLocation = useExternalStoryLocation(property);
+    if (useExternalStoryLocation) {
+      try {
+        return new LoadFromRelativeFile(new URL("file://" + System.getProperty(property)));
+      } catch (Exception e) {
+        throw new RuntimeException("Cannot load story files from url: file://" + System.getProperty(property));
+      }
+    } else {
+      return new LoadFromClasspath(clazz);
+    }
+  }
+
+
+  /**
+   * Find stories based on story location property, if the property is not set or NONE, then the story files will be loaded from the classpath
+   * @param property Story location property (absolute path - folder)
+   * @param suffix Story suffix for specific stories - i.e. : .ui.story
+   * @param clazz Class of the *Stories object
+   */
+  public static List<String> findStories(String property, String suffix, Class clazz) {
+    List<String> stories = null;
+    if (useExternalStoryLocation(property)) {
+      stories = findStoriesInFolder(System.getProperty(property), suffix);
+    } else {
+      stories = new StoryFinder()
+        .findPaths(codeLocationFromClass(clazz).getFile(), Arrays.asList(String.format("**/*%s", suffix)), null);
+    }
+    return stories;
+  }
+
+  private static List<String> findStoriesInFolder(String folderAbsolutePath, String suffix) {
+    List<String> results = Lists.newArrayList();
+    File folder = new File(folderAbsolutePath);
+    File[] listOfFiles = folder.listFiles();
+    if (listOfFiles != null) {
+      for (File file : listOfFiles) {
+        if (file.getName().endsWith(suffix)) {
+          results.add(file.getName());
+        }
+      }
+    }
+    return results;
+  }
+
+  private static boolean useExternalStoryLocation(String property) {
+    String storyLocationProp = System.getProperty(property);
+    return StringUtils.isNotEmpty(storyLocationProp) && !"NONE".equals(storyLocationProp);
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchUIStories.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchUIStories.java
new file mode 100644
index 0000000..5417ab1
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchUIStories.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.story;
+
+import org.apache.ambari.logsearch.domain.StoryDataRegistry;
+import org.apache.ambari.logsearch.steps.LogSearchDockerSteps;
+import org.apache.ambari.logsearch.steps.LogSearchUISteps;
+import org.jbehave.core.configuration.Configuration;
+import org.jbehave.core.Embeddable;
+import org.jbehave.core.embedder.executors.SameThreadExecutors;
+import org.jbehave.core.junit.JUnitStories;
+import org.jbehave.core.reporters.StoryReporterBuilder;
+import org.jbehave.core.steps.InjectableStepsFactory;
+import org.jbehave.core.steps.InstanceStepsFactory;
+import org.jbehave.web.selenium.RemoteWebDriverProvider;
+import org.jbehave.web.selenium.SeleniumConfiguration;
+import org.jbehave.web.selenium.SeleniumContext;
+import org.jbehave.web.selenium.WebDriverProvider;
+import org.jbehave.web.selenium.WebDriverScreenshotOnFailure;
+import org.openqa.selenium.Platform;
+import org.openqa.selenium.remote.DesiredCapabilities;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.jbehave.core.io.CodeLocations.codeLocationFromClass;
+import static org.jbehave.core.reporters.Format.CONSOLE;
+import static org.jbehave.core.reporters.Format.HTML;
+import static org.jbehave.core.reporters.Format.TXT;
+import static org.jbehave.core.reporters.Format.XML;
+
+public class LogSearchUIStories extends JUnitStories {
+
+  private WebDriverProvider driverProvider;
+  private SeleniumContext context;
+
+  private static final String UI_STORIES_LOCATION_PROPERTY = "ui.stories.location";
+  private static final String STORY_SUFFIX = ".ui.story";
+
+  public LogSearchUIStories() {
+    String dockerHost = System.getProperty("docker.host") != null ? System.getProperty("docker.host") : "localhost";
+    System.setProperty("REMOTE_WEBDRIVER_URL", String.format("http://%s:4444/wd/hub", dockerHost));
+    DesiredCapabilities capability = DesiredCapabilities.firefox();
+    capability.setPlatform(Platform.LINUX);
+    capability.setVersion("45.8.0");
+    driverProvider = new RemoteWebDriverProvider(capability);
+    StoryDataRegistry.INSTANCE.setWebDriverProvider(driverProvider);
+    context = new SeleniumContext();
+    configuredEmbedder().useExecutorService(new SameThreadExecutors().create(configuredEmbedder().embedderControls()));
+  }
+
+  @Override
+  public Configuration configuration() {
+    Class<? extends Embeddable> embeddableClass = this.getClass();
+    return new SeleniumConfiguration()
+      .useSeleniumContext(context)
+      .useWebDriverProvider(driverProvider)
+      .useStoryLoader(LogSearchStoryLocator.getStoryLoader(UI_STORIES_LOCATION_PROPERTY, this.getClass()))
+      .useStoryReporterBuilder(new StoryReporterBuilder()
+        .withCodeLocation(codeLocationFromClass(embeddableClass))
+        .withDefaultFormats()
+        .withFormats(CONSOLE, TXT, HTML, XML));
+  }
+
+  @Override
+  public InjectableStepsFactory stepsFactory() {
+    Configuration configuration = configuration();
+    return new InstanceStepsFactory(configuration, new LogSearchDockerSteps(), new LogSearchUISteps(driverProvider),
+      new WebDriverScreenshotOnFailure(driverProvider, configuration.storyReporterBuilder()));
+  }
+
+  @Override
+  protected List<String> storyPaths() {
+    return LogSearchStoryLocator.findStories(UI_STORIES_LOCATION_PROPERTY, STORY_SUFFIX, this.getClass());
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogfeederParsingStory.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogfeederParsingStory.java
deleted file mode 100644
index c502cc4..0000000
--- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogfeederParsingStory.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.story;
-
-public class LogfeederParsingStory extends LogSearchStory {
-}
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/AbstractPage.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/AbstractPage.java
new file mode 100644
index 0000000..b6d0a58
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/AbstractPage.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.web;
+
+import org.jbehave.web.selenium.WebDriverPage;
+import org.jbehave.web.selenium.WebDriverProvider;
+
+import java.util.List;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.fail;
+
+public abstract class AbstractPage extends WebDriverPage {
+
+  public AbstractPage(WebDriverProvider driverProvider) {
+    super(driverProvider);
+  }
+
+  public void found(String text) {
+    found(getPageSource(), text);
+  }
+
+  public void found(String pageSource, String text) {
+    if (!pageSource.contains(escapeHtml(text))) {
+      fail("Text: '" + text + "' not found in page '" + pageSource + "'");
+    }
+  }
+
+  public void found(List<String> texts) {
+    for (String text : texts) {
+      found(text);
+    }
+  }
+
+  public void notFound(String text) {
+    notFound(getPageSource(), text);
+  }
+
+  public void notFound(String pageSource, String text) {
+    assertThat(pageSource.contains(escapeHtml(text)), is(false));
+  }
+
+  private String escapeHtml(String text) {
+    return text.replace("<", "&lt;").replace(">", "&gt;");
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/Home.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/Home.java
new file mode 100644
index 0000000..6c576d4
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/Home.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.web;
+
+import org.apache.ambari.logsearch.domain.StoryDataRegistry;
+import org.jbehave.web.selenium.WebDriverProvider;
+
+import java.util.concurrent.TimeUnit;
+
+public class Home extends AbstractPage {
+
+  public Home(WebDriverProvider driverProvider) {
+    super(driverProvider);
+  }
+
+  public void open() {
+    get(String.format("http://%s:%d/index.html",
+      StoryDataRegistry.INSTANCE.getDockerHost(),
+      StoryDataRegistry.INSTANCE.getLogsearchPort()));
+    manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS);
+  }
+
+}
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/log_search_api_query_story.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/log_search_api_query_story.story
deleted file mode 100644
index 5abe8b4..0000000
--- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/log_search_api_query_story.story
+++ /dev/null
@@ -1,17 +0,0 @@
-Meta:
-
-Narrative:
-As a user
-I want to perform queries against Log Search api
-So that I can validate the json outputs
-
-Scenario: scenario description
-
-Given logsearch docker container
-When LogSearch api query sent: <apiQuery>
-Then The api query result is <jsonResult>
-
-Examples:
-|apiQuery|jsonResult|
-|/api/v1/service/logs/schema/fields|service-log-schema.json|
-|/api/v1/service/logs/levels/counts/namevalues?page=0&pageSize=25&startIndex=0&q=*%3A*|service-log-level-counts-values.json|
\ No newline at end of file
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/log_search_api_tests.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/log_search_api_tests.story
new file mode 100644
index 0000000..0af00f5
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/log_search_api_tests.story
@@ -0,0 +1,17 @@
+Meta:
+
+Narrative:
+As a user
+I want to perform queries against Log Search api
+So that I can validate the json outputs
+
+Scenario: Log Search API JSON responses
+
+Given logsearch docker container
+When LogSearch api query sent: <apiQuery>
+Then The api query result is <jsonResult>
+
+Examples:
+|apiQuery|jsonResult|
+|/api/v1/service/logs/schema/fields|service-log-schema.json|
+|/api/v1/service/logs/levels/counts?page=0&pageSize=25&startIndex=0&q=*%3A*|service-log-level-counts-values.json|
\ No newline at end of file
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/logfeeder_parsing_story.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/logfeeder_parsing_tests.story
similarity index 100%
rename from ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/logfeeder_parsing_story.story
rename to ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/logfeeder_parsing_tests.story
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story
new file mode 100644
index 0000000..543c211
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story
@@ -0,0 +1,20 @@
+Meta:
+
+Narrative:
+As a user
+I want to start LogSearch services and login to the UI
+So that I can validate the proper user
+
+Scenario: login with admin/admin
+
+Given logsearch docker container
+And open logsearch home page
+When login with admin / admin
+Then page contains text: 'Service Logs'
+
+Scenario: login with admin and wrong password
+
+Given logsearch docker container
+And open logsearch home page
+When login with admin / wrongpassword
+Then page does not contain text: 'Service Logs'
\ No newline at end of file
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml b/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
index 25e4306..5d6f8b6 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
@@ -44,6 +44,11 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-config-zookeeper</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
       <groupId>commons-codec</groupId>
       <artifactId>commons-codec</artifactId>
     </dependency>
@@ -88,7 +93,6 @@
       <artifactId>commons-logging</artifactId>
       <version>1.1.1</version>
     </dependency>
-
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
@@ -125,9 +129,9 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
-    <groupId>com.amazonaws</groupId>
-    <artifactId>aws-java-sdk-s3</artifactId>
-    <version>1.11.5</version>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-s3</artifactId>
+      <version>1.11.5</version>
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
@@ -135,26 +139,40 @@
       <version>1.11</version>
     </dependency>
     <dependency>
-    <groupId>com.amazonaws</groupId>
-    <artifactId>aws-java-sdk-iam</artifactId>
-    <version>1.11.5</version>
-  </dependency>
-   <dependency>
-    <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-common</artifactId>
-    <version>${hadoop.version}</version>
-  </dependency>
-  <dependency>
-    <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-hdfs</artifactId>
-    <version>${hadoop.version}</version>
-  </dependency>
-  <dependency>
-    <groupId>commons-io</groupId>
-    <artifactId>commons-io</artifactId>
-    <version>${common.io.version}</version>
-  </dependency>
- </dependencies>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-iam</artifactId>
+      <version>1.11.5</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.curator</groupId>
+          <artifactId>curator-framework</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.curator</groupId>
+          <artifactId>curator-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.curator</groupId>
+          <artifactId>curator-recipes</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>${common.io.version}</version>
+    </dependency>
+  </dependencies>
   <build>
     <finalName>LogFeeder</finalName>
     <pluginManagement>
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
index a47c71f..8d7c69f 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
@@ -19,60 +19,37 @@
 
 package org.apache.ambari.logfeeder;
 
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.lang.reflect.Type;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
 
-import org.apache.ambari.logfeeder.filter.Filter;
-import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.common.ConfigHandler;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+import org.apache.ambari.logsearch.config.api.LogSearchConfigFactory;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig.Component;
+import org.apache.ambari.logsearch.config.zookeeper.LogSearchConfigZK;
+import org.apache.ambari.logfeeder.input.InputConfigUploader;
 import org.apache.ambari.logfeeder.input.InputManager;
-import org.apache.ambari.logfeeder.input.InputSimulate;
-import org.apache.ambari.logfeeder.logconfig.LogConfigHandler;
+import org.apache.ambari.logfeeder.loglevelfilter.LogLevelFilterHandler;
 import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.metrics.MetricsManager;
-import org.apache.ambari.logfeeder.output.Output;
-import org.apache.ambari.logfeeder.output.OutputManager;
-import org.apache.ambari.logfeeder.util.AliasUtil;
-import org.apache.ambari.logfeeder.util.FileUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.SSLUtil;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.ambari.logfeeder.util.AliasUtil.AliasType;
+import com.google.common.collect.Maps;
 import org.apache.hadoop.util.ShutdownHookManager;
-import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
-import com.google.gson.reflect.TypeToken;
-
 public class LogFeeder {
   private static final Logger LOG = Logger.getLogger(LogFeeder.class);
 
   private static final int LOGFEEDER_SHUTDOWN_HOOK_PRIORITY = 30;
   private static final int CHECKPOINT_CLEAN_INTERVAL_MS = 24 * 60 * 60 * 60 * 1000; // 24 hours
 
-  private OutputManager outputManager = new OutputManager();
+  private ConfigHandler configHandler = new ConfigHandler();
+  private LogSearchConfig config;
+  
   private InputManager inputManager = new InputManager();
   private MetricsManager metricsManager = new MetricsManager();
 
-  public static Map<String, Object> globalConfigs = new HashMap<>();
-
-  private List<Map<String, Object>> inputConfigList = new ArrayList<>();
-  private List<Map<String, Object>> filterConfigList = new ArrayList<>();
-  private List<Map<String, Object>> outputConfigList = new ArrayList<>();
-  
   private long lastCheckPointCleanedMS = 0;
   private boolean isLogfeederCompleted = false;
   private Thread statLoggerThread = null;
@@ -91,329 +68,23 @@
   }
 
   private void init() throws Throwable {
-    Date startTime = new Date();
+    long startTime = System.currentTimeMillis();
 
-    loadConfigFiles();
-    addSimulatedInputs();
-    mergeAllConfigs();
-    
+    configHandler.init();
     SSLUtil.ensureStorePasswords();
     
-    outputManager.init();
-    inputManager.init();
-    metricsManager.init();
+    config = LogSearchConfigFactory.createLogSearchConfig(Component.LOGFEEDER,
+        Maps.fromProperties(LogFeederUtil.getProperties()), LogSearchConfigZK.class);
+    LogLevelFilterHandler.init(config);
+    InputConfigUploader.load(config);
+    config.monitorInputConfigChanges(configHandler, new LogLevelFilterHandler());
     
-    LogConfigHandler.handleConfig();
+    metricsManager.init();
     
     LOG.debug("==============");
     
-    Date endTime = new Date();
-    LOG.info("Took " + (endTime.getTime() - startTime.getTime()) + " ms to initialize");
-  }
-
-  private void loadConfigFiles() throws Exception {
-    List<String> configFiles = getConfigFiles();
-    for (String configFileName : configFiles) {
-      LOG.info("Going to load config file:" + configFileName);
-      configFileName = configFileName.replace("\\ ", "%20");
-      File configFile = new File(configFileName);
-      if (configFile.exists() && configFile.isFile()) {
-        LOG.info("Config file exists in path." + configFile.getAbsolutePath());
-        loadConfigsUsingFile(configFile);
-      } else {
-        LOG.info("Trying to load config file from classloader: " + configFileName);
-        loadConfigsUsingClassLoader(configFileName);
-        LOG.info("Loaded config file from classloader: " + configFileName);
-      }
-    }
-  }
-
-  private List<String> getConfigFiles() {
-    List<String> configFiles = new ArrayList<>();
-    
-    String logfeederConfigFilesProperty = LogFeederUtil.getStringProperty("logfeeder.config.files");
-    LOG.info("logfeeder.config.files=" + logfeederConfigFilesProperty);
-    if (logfeederConfigFilesProperty != null) {
-      configFiles.addAll(Arrays.asList(logfeederConfigFilesProperty.split(",")));
-    }
-
-    String inputConfigDir = LogFeederUtil.getStringProperty("input_config_dir");
-    if (StringUtils.isNotEmpty(inputConfigDir)) {
-      File configDirFile = new File(inputConfigDir);
-      List<File> inputConfigFiles = FileUtil.getAllFileFromDir(configDirFile, "json", false);
-      for (File inputConfigFile : inputConfigFiles) {
-        configFiles.add(inputConfigFile.getAbsolutePath());
-      }
-    }
-    
-    if (CollectionUtils.isEmpty(configFiles)) {
-      String configFileProperty = LogFeederUtil.getStringProperty("config.file", "config.json");
-      configFiles.addAll(Arrays.asList(configFileProperty.split(",")));
-    }
-    
-    return configFiles;
-  }
-
-  private void loadConfigsUsingFile(File configFile) throws Exception {
-    try {
-      String configData = FileUtils.readFileToString(configFile);
-      loadConfigs(configData);
-    } catch (Exception t) {
-      LOG.error("Error opening config file. configFilePath=" + configFile.getAbsolutePath());
-      throw t;
-    }
-  }
-
-  private void loadConfigsUsingClassLoader(String configFileName) throws Exception {
-    try (BufferedInputStream fis = (BufferedInputStream) this.getClass().getClassLoader().getResourceAsStream(configFileName)) {
-      String configData = IOUtils.toString(fis);
-      loadConfigs(configData);
-    }
-  }
-
-  @SuppressWarnings("unchecked")
-  private void loadConfigs(String configData) throws Exception {
-    Type type = new TypeToken<Map<String, Object>>() {}.getType();
-    Map<String, Object> configMap = LogFeederUtil.getGson().fromJson(configData, type);
-
-    // Get the globals
-    for (String key : configMap.keySet()) {
-      switch (key) {
-        case "global" :
-          globalConfigs.putAll((Map<String, Object>) configMap.get(key));
-          break;
-        case "input" :
-          List<Map<String, Object>> inputConfig = (List<Map<String, Object>>) configMap.get(key);
-          inputConfigList.addAll(inputConfig);
-          break;
-        case "filter" :
-          List<Map<String, Object>> filterConfig = (List<Map<String, Object>>) configMap.get(key);
-          filterConfigList.addAll(filterConfig);
-          break;
-        case "output" :
-          List<Map<String, Object>> outputConfig = (List<Map<String, Object>>) configMap.get(key);
-          outputConfigList.addAll(outputConfig);
-          break;
-        default :
-          LOG.warn("Unknown config key: " + key);
-      }
-    }
-  }
-  
-  private void addSimulatedInputs() {
-    int simulatedInputNumber = LogFeederUtil.getIntProperty("logfeeder.simulate.input_number", 0);
-    if (simulatedInputNumber == 0)
-      return;
-    
-    InputSimulate.loadTypeToFilePath(inputConfigList);
-    inputConfigList.clear();
-    
-    for (int i = 0; i < simulatedInputNumber; i++) {
-      HashMap<String, Object> mapList = new HashMap<String, Object>();
-      mapList.put("source", "simulate");
-      mapList.put("rowtype", "service");
-      inputConfigList.add(mapList);
-    }
-  }
-
-  private void mergeAllConfigs() {
-    loadOutputs();
-    loadInputs();
-    loadFilters();
-    
-    assignOutputsToInputs();
-  }
-
-  private void loadOutputs() {
-    for (Map<String, Object> map : outputConfigList) {
-      if (map == null) {
-        continue;
-      }
-      mergeBlocks(globalConfigs, map);
-
-      String value = (String) map.get("destination");
-      if (StringUtils.isEmpty(value)) {
-        LOG.error("Output block doesn't have destination element");
-        continue;
-      }
-      Output output = (Output) AliasUtil.getClassInstance(value, AliasType.OUTPUT);
-      if (output == null) {
-        LOG.error("Output object could not be found");
-        continue;
-      }
-      output.setDestination(value);
-      output.loadConfig(map);
-
-      // We will only check for is_enabled out here. Down below we will check whether this output is enabled for the input
-      if (output.getBooleanValue("is_enabled", true)) {
-        output.logConfigs(Level.INFO);
-        outputManager.add(output);
-      } else {
-        LOG.info("Output is disabled. So ignoring it. " + output.getShortDescription());
-      }
-    }
-  }
-
-  private void loadInputs() {
-    for (Map<String, Object> map : inputConfigList) {
-      if (map == null) {
-        continue;
-      }
-      mergeBlocks(globalConfigs, map);
-
-      String value = (String) map.get("source");
-      if (StringUtils.isEmpty(value)) {
-        LOG.error("Input block doesn't have source element");
-        continue;
-      }
-      Input input = (Input) AliasUtil.getClassInstance(value, AliasType.INPUT);
-      if (input == null) {
-        LOG.error("Input object could not be found");
-        continue;
-      }
-      input.setType(value);
-      input.loadConfig(map);
-
-      if (input.isEnabled()) {
-        input.setOutputManager(outputManager);
-        input.setInputManager(inputManager);
-        inputManager.add(input);
-        input.logConfigs(Level.INFO);
-      } else {
-        LOG.info("Input is disabled. So ignoring it. " + input.getShortDescription());
-      }
-    }
-  }
-
-  private void loadFilters() {
-    sortFilters();
-
-    List<Input> toRemoveInputList = new ArrayList<Input>();
-    for (Input input : inputManager.getInputList()) {
-      for (Map<String, Object> map : filterConfigList) {
-        if (map == null) {
-          continue;
-        }
-        mergeBlocks(globalConfigs, map);
-
-        String value = (String) map.get("filter");
-        if (StringUtils.isEmpty(value)) {
-          LOG.error("Filter block doesn't have filter element");
-          continue;
-        }
-        Filter filter = (Filter) AliasUtil.getClassInstance(value, AliasType.FILTER);
-        if (filter == null) {
-          LOG.error("Filter object could not be found");
-          continue;
-        }
-        filter.loadConfig(map);
-        filter.setInput(input);
-
-        if (filter.isEnabled()) {
-          filter.setOutputManager(outputManager);
-          input.addFilter(filter);
-          filter.logConfigs(Level.INFO);
-        } else {
-          LOG.debug("Ignoring filter " + filter.getShortDescription() + " for input " + input.getShortDescription());
-        }
-      }
-      
-      if (input.getFirstFilter() == null) {
-        toRemoveInputList.add(input);
-      }
-    }
-
-    for (Input toRemoveInput : toRemoveInputList) {
-      LOG.warn("There are no filters, we will ignore this input. " + toRemoveInput.getShortDescription());
-      inputManager.removeInput(toRemoveInput);
-    }
-  }
-
-  private void sortFilters() {
-    Collections.sort(filterConfigList, new Comparator<Map<String, Object>>() {
-
-      @Override
-      public int compare(Map<String, Object> o1, Map<String, Object> o2) {
-        Object o1Sort = o1.get("sort_order");
-        Object o2Sort = o2.get("sort_order");
-        if (o1Sort == null || o2Sort == null) {
-          return 0;
-        }
-        
-        int o1Value = parseSort(o1, o1Sort);
-        int o2Value = parseSort(o2, o2Sort);
-        
-        return o1Value - o2Value;
-      }
-
-      private int parseSort(Map<String, Object> map, Object o) {
-        if (!(o instanceof Number)) {
-          try {
-            return (new Double(Double.parseDouble(o.toString()))).intValue();
-          } catch (Throwable t) {
-            LOG.error("Value is not of type Number. class=" + o.getClass().getName() + ", value=" + o.toString()
-              + ", map=" + map.toString());
-            return 0;
-          }
-        } else {
-          return ((Number) o).intValue();
-        }
-      }
-    });
-  }
-
-  private void assignOutputsToInputs() {
-    Set<Output> usedOutputSet = new HashSet<Output>();
-    for (Input input : inputManager.getInputList()) {
-      for (Output output : outputManager.getOutputs()) {
-        if (LogFeederUtil.isEnabled(output.getConfigs(), input.getConfigs())) {
-          usedOutputSet.add(output);
-          input.addOutput(output);
-        }
-      }
-    }
-    
-    // In case of simulation copies of the output are added for each simulation instance, these must be added to the manager
-    for (Output output : InputSimulate.getSimulateOutputs()) {
-      outputManager.add(output);
-      usedOutputSet.add(output);
-    }
-    
-    outputManager.retainUsedOutputs(usedOutputSet);
-  }
-
-  @SuppressWarnings("unchecked")
-  private void mergeBlocks(Map<String, Object> fromMap, Map<String, Object> toMap) {
-    for (String key : fromMap.keySet()) {
-      Object objValue = fromMap.get(key);
-      if (objValue == null) {
-        continue;
-      }
-      if (objValue instanceof Map) {
-        Map<String, Object> globalFields = LogFeederUtil.cloneObject((Map<String, Object>) objValue);
-
-        Map<String, Object> localFields = (Map<String, Object>) toMap.get(key);
-        if (localFields == null) {
-          localFields = new HashMap<String, Object>();
-          toMap.put(key, localFields);
-        }
-
-        if (globalFields != null) {
-          for (String fieldKey : globalFields.keySet()) {
-            if (!localFields.containsKey(fieldKey)) {
-              localFields.put(fieldKey, globalFields.get(fieldKey));
-            }
-          }
-        }
-      }
-    }
-
-    // Let's add the rest of the top level fields if missing
-    for (String key : fromMap.keySet()) {
-      if (!toMap.containsKey(key)) {
-        toMap.put(key, fromMap.get(key));
-      }
-    }
+    long endTime = System.currentTimeMillis();
+    LOG.info("Took " + (endTime - startTime) + " ms to initialize");
   }
 
   private class JVMShutdownHook extends Thread {
@@ -422,10 +93,8 @@
       try {
         LOG.info("Processing is shutting down.");
 
-        inputManager.close();
-        outputManager.close();
-        inputManager.checkInAll();
-
+        configHandler.close();
+        config.close();
         logStats();
 
         LOG.info("LogSearch is exiting.");
@@ -436,7 +105,6 @@
   }
 
   private void monitor() throws Exception {
-    inputManager.monitor();
     JVMShutdownHook logfeederJVMHook = new JVMShutdownHook();
     ShutdownHookManager.get().addShutdownHook(logfeederJVMHook, LOGFEEDER_SHUTDOWN_HOOK_PRIORITY);
     
@@ -458,7 +126,7 @@
 
           if (System.currentTimeMillis() > (lastCheckPointCleanedMS + CHECKPOINT_CLEAN_INTERVAL_MS)) {
             lastCheckPointCleanedMS = System.currentTimeMillis();
-            inputManager.cleanCheckPointFiles();
+            configHandler.cleanCheckPointFiles();
           }
 
           if (isLogfeederCompleted) {
@@ -474,13 +142,11 @@
   }
 
   private void logStats() {
-    inputManager.logStats();
-    outputManager.logStats();
+    configHandler.logStats();
 
     if (metricsManager.isMetricsEnabled()) {
       List<MetricData> metricsList = new ArrayList<MetricData>();
-      inputManager.addMetricsContainers(metricsList);
-      outputManager.addMetricsContainers(metricsList);
+      configHandler.addMetrics(metricsList);
       metricsManager.useMetrics(metricsList);
     }
   }
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
index 68897e8..cfcc199 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
@@ -20,54 +20,19 @@
 package org.apache.ambari.logfeeder.common;
 
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.log4j.Logger;
 import org.apache.log4j.Priority;
 
 
-public abstract class ConfigBlock {
-  private static final Logger LOG = Logger.getLogger(ConfigBlock.class);
-
-  private boolean drain = false;
-
+public abstract class ConfigBlock extends ConfigItem {
   protected Map<String, Object> configs;
   protected Map<String, String> contextFields = new HashMap<String, String>();
-  public MetricData statMetric = new MetricData(getStatMetricName(), false);
-  protected String getStatMetricName() {
-    return null;
-  }
-  
   public ConfigBlock() {
   }
 
-  /**
-   * Used while logging. Keep it short and meaningful
-   */
-  public abstract String getShortDescription();
-
-  /**
-   * Every implementor need to give name to the thread they create
-   */
-  public String getNameForThread() {
-    return this.getClass().getSimpleName();
-  }
-
-  public void addMetricsContainers(List<MetricData> metricsList) {
-    metricsList.add(statMetric);
-  }
-
-  /**
-   * This method needs to be overwritten by deriving classes.
-   */
-  public void init() throws Exception {
-  }
-
   public void loadConfig(Map<String, Object> map) {
     configs = LogFeederUtil.cloneObject(map);
 
@@ -81,46 +46,6 @@
     return configs;
   }
 
-  @SuppressWarnings("unchecked")
-  public boolean isEnabled() {
-    boolean isEnabled = getBooleanValue("is_enabled", true);
-    if (isEnabled) {
-      // Let's check for static conditions
-      Map<String, Object> conditions = (Map<String, Object>) configs.get("conditions");
-      boolean allow = true;
-      if (MapUtils.isNotEmpty(conditions)) {
-        allow = false;
-        for (String conditionType : conditions.keySet()) {
-          if (conditionType.equalsIgnoreCase("fields")) {
-            Map<String, Object> fields = (Map<String, Object>) conditions.get("fields");
-            for (String fieldName : fields.keySet()) {
-              Object values = fields.get(fieldName);
-              if (values instanceof String) {
-                allow = isFieldConditionMatch(fieldName, (String) values);
-              } else {
-                List<String> listValues = (List<String>) values;
-                for (String stringValue : listValues) {
-                  allow = isFieldConditionMatch(fieldName, stringValue);
-                  if (allow) {
-                    break;
-                  }
-                }
-              }
-              if (allow) {
-                break;
-              }
-            }
-          }
-          if (allow) {
-            break;
-          }
-        }
-        isEnabled = allow;
-      }
-    }
-    return isEnabled;
-  }
-
   public boolean isFieldConditionMatch(String fieldName, String stringValue) {
     boolean allow = false;
     String fieldValue = (String) configs.get(fieldName);
@@ -207,27 +132,17 @@
     return retValue;
   }
 
+  @Override
+  public boolean isEnabled() {
+    return getBooleanValue("is_enabled", true);
+  }
+
   public Map<String, String> getContextFields() {
     return contextFields;
   }
 
-  public void incrementStat(int count) {
-    statMetric.value += count;
-  }
-
-  public void logStatForMetric(MetricData metric, String prefixStr) {
-    LogFeederUtil.logStatForMetric(metric, prefixStr, ", key=" + getShortDescription());
-  }
-
-  public synchronized void logStat() {
-    logStatForMetric(statMetric, "Stat");
-  }
-
   public boolean logConfigs(Priority level) {
-    if (level.toInt() == Priority.INFO_INT && !LOG.isInfoEnabled()) {
-      return false;
-    }
-    if (level.toInt() == Priority.DEBUG_INT && !LOG.isDebugEnabled()) {
+    if (!super.logConfigs(level)) {
       return false;
     }
     LOG.log(level, "Printing configuration Block=" + getShortDescription());
@@ -235,12 +150,4 @@
     LOG.log(level, "contextFields=" + contextFields);
     return true;
   }
-
-  public boolean isDrain() {
-    return drain;
-  }
-
-  public void setDrain(boolean drain) {
-    this.drain = drain;
-  }
 }
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigHandler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigHandler.java
new file mode 100644
index 0000000..726ff27
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigHandler.java
@@ -0,0 +1,420 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.common;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.lang.reflect.Type;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.logfeeder.filter.Filter;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputManager;
+import org.apache.ambari.logfeeder.input.InputSimulate;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logfeeder.output.Output;
+import org.apache.ambari.logfeeder.output.OutputManager;
+import org.apache.ambari.logfeeder.util.AliasUtil;
+import org.apache.ambari.logfeeder.util.FileUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.BooleanUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.ambari.logfeeder.util.AliasUtil.AliasType;
+import org.apache.ambari.logsearch.config.api.InputConfigMonitor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.FilterDescriptorImpl;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputConfigImpl;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputDescriptorImpl;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+import com.google.gson.reflect.TypeToken;
+
+public class ConfigHandler implements InputConfigMonitor {
+  private static final Logger LOG = Logger.getLogger(ConfigHandler.class);
+
+  private final OutputManager outputManager = new OutputManager();
+  private final InputManager inputManager = new InputManager();
+
+  private final Map<String, Object> globalConfigs = new HashMap<>();
+  private final List<String> globalConfigJsons = new ArrayList<String>();
+
+  private final List<InputDescriptor> inputConfigList = new ArrayList<>();
+  private final List<FilterDescriptor> filterConfigList = new ArrayList<>();
+  private final List<Map<String, Object>> outputConfigList = new ArrayList<>();
+  
+  private boolean simulateMode = false;
+  
+  public ConfigHandler() {}
+  
+  public void init() throws Exception {
+    loadConfigFiles();
+    loadOutputs();
+    simulateIfNeeded();
+    
+    inputManager.init();
+    outputManager.init();
+  }
+  
+  private void loadConfigFiles() throws Exception {
+    List<String> configFiles = getConfigFiles();
+    for (String configFileName : configFiles) {
+      LOG.info("Going to load config file:" + configFileName);
+      configFileName = configFileName.replace("\\ ", "%20");
+      File configFile = new File(configFileName);
+      if (configFile.exists() && configFile.isFile()) {
+        LOG.info("Config file exists in path." + configFile.getAbsolutePath());
+        loadConfigsUsingFile(configFile);
+      } else {
+        LOG.info("Trying to load config file from classloader: " + configFileName);
+        loadConfigsUsingClassLoader(configFileName);
+        LOG.info("Loaded config file from classloader: " + configFileName);
+      }
+    }
+  }
+
+  private List<String> getConfigFiles() {
+    List<String> configFiles = new ArrayList<>();
+    
+    String logfeederConfigFilesProperty = LogFeederUtil.getStringProperty("logfeeder.config.files");
+    LOG.info("logfeeder.config.files=" + logfeederConfigFilesProperty);
+    if (logfeederConfigFilesProperty != null) {
+      configFiles.addAll(Arrays.asList(logfeederConfigFilesProperty.split(",")));
+    }
+
+    String inputConfigDir = LogFeederUtil.getStringProperty("input_config_dir");
+    if (StringUtils.isNotEmpty(inputConfigDir)) {
+      File configDirFile = new File(inputConfigDir);
+      List<File> inputConfigFiles = FileUtil.getAllFileFromDir(configDirFile, "json", false);
+      for (File inputConfigFile : inputConfigFiles) {
+        configFiles.add(inputConfigFile.getAbsolutePath());
+      }
+    }
+    
+    if (CollectionUtils.isEmpty(configFiles)) {
+      String configFileProperty = LogFeederUtil.getStringProperty("config.file", "config.json");
+      configFiles.addAll(Arrays.asList(configFileProperty.split(",")));
+    }
+    
+    return configFiles;
+  }
+
+  private void loadConfigsUsingFile(File configFile) throws Exception {
+    try {
+      String configData = FileUtils.readFileToString(configFile, Charset.defaultCharset());
+      loadConfigs(configData);
+    } catch (Exception t) {
+      LOG.error("Error opening config file. configFilePath=" + configFile.getAbsolutePath());
+      throw t;
+    }
+  }
+
+  private void loadConfigsUsingClassLoader(String configFileName) throws Exception {
+    try (java.io.InputStream fis = this.getClass().getClassLoader().getResourceAsStream(configFileName)) {
+      String configData = IOUtils.toString(fis, Charset.defaultCharset());
+      loadConfigs(configData);
+    }
+  }
+  
+  @Override
+  public void loadInputConfigs(String serviceName, InputConfig inputConfig) throws Exception {
+    inputConfigList.clear();
+    filterConfigList.clear();
+    
+    inputConfigList.addAll(inputConfig.getInput());
+    filterConfigList.addAll(inputConfig.getFilter());
+    
+    if (simulateMode) {
+      InputSimulate.loadTypeToFilePath(inputConfigList);
+    } else {
+      loadInputs(serviceName);
+      loadFilters(serviceName);
+      assignOutputsToInputs(serviceName);
+      
+      inputManager.startInputs(serviceName);
+    }
+  }
+
+  @Override
+  public void removeInputs(String serviceName) {
+    inputManager.removeInputsForService(serviceName);
+  }
+
+  @SuppressWarnings("unchecked")
+  public void loadConfigs(String configData) throws Exception {
+    Type type = new TypeToken<Map<String, Object>>() {}.getType();
+    Map<String, Object> configMap = LogFeederUtil.getGson().fromJson(configData, type);
+
+    // Get the globals
+    for (String key : configMap.keySet()) {
+      switch (key) {
+        case "global" :
+          globalConfigs.putAll((Map<String, Object>) configMap.get(key));
+          globalConfigJsons.add(configData);
+          break;
+        case "output" :
+          List<Map<String, Object>> outputConfig = (List<Map<String, Object>>) configMap.get(key);
+          outputConfigList.addAll(outputConfig);
+          break;
+        default :
+          LOG.warn("Unknown config key: " + key);
+      }
+    }
+  }
+  
+  @Override
+  public List<String> getGlobalConfigJsons() {
+    return globalConfigJsons;
+  }
+  
+  private void simulateIfNeeded() throws Exception {
+    int simulatedInputNumber = LogFeederUtil.getIntProperty("logfeeder.simulate.input_number", 0);
+    if (simulatedInputNumber == 0)
+      return;
+    
+    InputConfigImpl simulateInputConfig = new InputConfigImpl();
+    List<InputDescriptorImpl> inputConfigDescriptors = new ArrayList<>();
+    simulateInputConfig.setInput(inputConfigDescriptors);
+    simulateInputConfig.setFilter(new ArrayList<FilterDescriptorImpl>());
+    for (int i = 0; i < simulatedInputNumber; i++) {
+      InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {};
+      inputDescriptor.setSource("simulate");
+      inputDescriptor.setRowtype("service");
+      inputDescriptor.setAddFields(new HashMap<String, String>());
+      inputConfigDescriptors.add(inputDescriptor);
+    }
+    
+    loadInputConfigs("Simulation", simulateInputConfig);
+    
+    simulateMode = true;
+  }
+
+  private void loadOutputs() {
+    for (Map<String, Object> map : outputConfigList) {
+      if (map == null) {
+        continue;
+      }
+      mergeBlocks(globalConfigs, map);
+
+      String value = (String) map.get("destination");
+      if (StringUtils.isEmpty(value)) {
+        LOG.error("Output block doesn't have destination element");
+        continue;
+      }
+      Output output = (Output) AliasUtil.getClassInstance(value, AliasType.OUTPUT);
+      if (output == null) {
+        LOG.error("Output object could not be found");
+        continue;
+      }
+      output.setDestination(value);
+      output.loadConfig(map);
+
+      // We will only check for is_enabled out here. Down below we will check whether this output is enabled for the input
+      if (output.isEnabled()) {
+        output.logConfigs(Level.INFO);
+        outputManager.add(output);
+      } else {
+        LOG.info("Output is disabled. So ignoring it. " + output.getShortDescription());
+      }
+    }
+  }
+
+  private void loadInputs(String serviceName) {
+    for (InputDescriptor inputDescriptor : inputConfigList) {
+      if (inputDescriptor == null) {
+        continue;
+      }
+
+      String source = (String) inputDescriptor.getSource();
+      if (StringUtils.isEmpty(source)) {
+        LOG.error("Input block doesn't have source element");
+        continue;
+      }
+      Input input = (Input) AliasUtil.getClassInstance(source, AliasType.INPUT);
+      if (input == null) {
+        LOG.error("Input object could not be found");
+        continue;
+      }
+      input.setType(source);
+      input.loadConfig(inputDescriptor);
+
+      if (input.isEnabled()) {
+        input.setOutputManager(outputManager);
+        input.setInputManager(inputManager);
+        inputManager.add(serviceName, input);
+        input.logConfigs(Level.INFO);
+      } else {
+        LOG.info("Input is disabled. So ignoring it. " + input.getShortDescription());
+      }
+    }
+  }
+
+  private void loadFilters(String serviceName) {
+    sortFilters();
+
+    List<Input> toRemoveInputList = new ArrayList<Input>();
+    for (Input input : inputManager.getInputList(serviceName)) {
+      for (FilterDescriptor filterDescriptor : filterConfigList) {
+        if (filterDescriptor == null) {
+          continue;
+        }
+        if (BooleanUtils.isFalse(filterDescriptor.isEnabled())) {
+          LOG.debug("Ignoring filter " + filterDescriptor.getFilter() + " because it is disabled");
+          continue;
+        }
+        if (!input.isFilterRequired(filterDescriptor)) {
+          LOG.debug("Ignoring filter " + filterDescriptor.getFilter() + " for input " + input.getShortDescription());
+          continue;
+        }
+
+        String value = filterDescriptor.getFilter();
+        if (StringUtils.isEmpty(value)) {
+          LOG.error("Filter block doesn't have filter element");
+          continue;
+        }
+        Filter filter = (Filter) AliasUtil.getClassInstance(value, AliasType.FILTER);
+        if (filter == null) {
+          LOG.error("Filter object could not be found");
+          continue;
+        }
+        filter.loadConfig(filterDescriptor);
+        filter.setInput(input);
+
+        filter.setOutputManager(outputManager);
+        input.addFilter(filter);
+        filter.logConfigs(Level.INFO);
+      }
+      
+      if (input.getFirstFilter() == null) {
+        toRemoveInputList.add(input);
+      }
+    }
+
+    for (Input toRemoveInput : toRemoveInputList) {
+      LOG.warn("There are no filters, we will ignore this input. " + toRemoveInput.getShortDescription());
+      inputManager.removeInput(toRemoveInput);
+    }
+  }
+
+  private void sortFilters() {
+    Collections.sort(filterConfigList, new Comparator<FilterDescriptor>() {
+      @Override
+      public int compare(FilterDescriptor o1, FilterDescriptor o2) {
+        Integer o1Sort = o1.getSortOrder();
+        Integer o2Sort = o2.getSortOrder();
+        if (o1Sort == null || o2Sort == null) {
+          return 0;
+        }
+        
+        return o1Sort.compareTo(o2Sort);
+      }
+    } );
+  }
+
+  private void assignOutputsToInputs(String serviceName) {
+    Set<Output> usedOutputSet = new HashSet<Output>();
+    for (Input input : inputManager.getInputList(serviceName)) {
+      for (Output output : outputManager.getOutputs()) {
+        if (input.isOutputRequired(output)) {
+          usedOutputSet.add(output);
+          input.addOutput(output);
+        }
+      }
+    }
+    
+    // In case of simulation copies of the output are added for each simulation instance, these must be added to the manager
+    for (Output output : InputSimulate.getSimulateOutputs()) {
+      outputManager.add(output);
+      usedOutputSet.add(output);
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private void mergeBlocks(Map<String, Object> fromMap, Map<String, Object> toMap) {
+    for (String key : fromMap.keySet()) {
+      Object objValue = fromMap.get(key);
+      if (objValue == null) {
+        continue;
+      }
+      if (objValue instanceof Map) {
+        Map<String, Object> globalFields = LogFeederUtil.cloneObject((Map<String, Object>) objValue);
+
+        Map<String, Object> localFields = (Map<String, Object>) toMap.get(key);
+        if (localFields == null) {
+          localFields = new HashMap<String, Object>();
+          toMap.put(key, localFields);
+        }
+
+        if (globalFields != null) {
+          for (String fieldKey : globalFields.keySet()) {
+            if (!localFields.containsKey(fieldKey)) {
+              localFields.put(fieldKey, globalFields.get(fieldKey));
+            }
+          }
+        }
+      }
+    }
+
+    // Let's add the rest of the top level fields if missing
+    for (String key : fromMap.keySet()) {
+      if (!toMap.containsKey(key)) {
+        toMap.put(key, fromMap.get(key));
+      }
+    }
+  }
+
+  public void cleanCheckPointFiles() {
+    inputManager.cleanCheckPointFiles();
+  }
+
+  public void logStats() {
+    inputManager.logStats();
+    outputManager.logStats();
+  }
+  
+  public void addMetrics(List<MetricData> metricsList) {
+    inputManager.addMetricsContainers(metricsList);
+    outputManager.addMetricsContainers(metricsList);
+  }
+
+  public void waitOnAllInputs() {
+    inputManager.waitOnAllInputs();
+  }
+
+  public void close() {
+    inputManager.close();
+    outputManager.close();
+    inputManager.checkInAll();
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigItem.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigItem.java
new file mode 100644
index 0000000..5c20a8e
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigItem.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.common;
+
+import java.util.List;
+
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.log4j.Logger;
+import org.apache.log4j.Priority;
+
+public abstract class ConfigItem {
+
+  protected static final Logger LOG = Logger.getLogger(ConfigItem.class);
+  private boolean drain = false;
+  public MetricData statMetric = new MetricData(getStatMetricName(), false);
+
+  public ConfigItem() {
+    super();
+  }
+
+  protected String getStatMetricName() {
+    return null;
+  }
+
+  /**
+   * Used while logging. Keep it short and meaningful
+   */
+  public abstract String getShortDescription();
+
+  /**
+   * Every implementor needs to give a name to the thread they create
+   */
+  public String getNameForThread() {
+    return this.getClass().getSimpleName();
+  }
+
+  public void addMetricsContainers(List<MetricData> metricsList) {
+    metricsList.add(statMetric);
+  }
+
+  /**
+   * This method needs to be overridden by deriving classes.
+   */
+  public void init() throws Exception {
+  }
+
+  public abstract boolean isEnabled();
+
+  public void incrementStat(int count) {
+    statMetric.value += count;
+  }
+
+  public void logStatForMetric(MetricData metric, String prefixStr) {
+    LogFeederUtil.logStatForMetric(metric, prefixStr, ", key=" + getShortDescription());
+  }
+
+  public synchronized void logStat() {
+    logStatForMetric(statMetric, "Stat");
+  }
+
+  public boolean logConfigs(Priority level) {
+    if (level.toInt() == Priority.INFO_INT && !LOG.isInfoEnabled()) {
+      return false;
+    }
+    if (level.toInt() == Priority.DEBUG_INT && !LOG.isDebugEnabled()) {
+      return false;
+    }
+    return true;
+  }
+
+  public boolean isDrain() {
+    return drain;
+  }
+
+  public void setDrain(boolean drain) {
+    this.drain = drain;
+  }
+
+}
\ No newline at end of file
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
index afd903e..fd02497 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
@@ -24,7 +24,7 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.common.ConfigBlock;
+import org.apache.ambari.logfeeder.common.ConfigItem;
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
@@ -33,18 +33,28 @@
 import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.ambari.logfeeder.util.AliasUtil;
 import org.apache.ambari.logfeeder.util.AliasUtil.AliasType;
-import org.apache.log4j.Logger;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.PostMapValues;
+import org.apache.commons.lang.BooleanUtils;
 import org.apache.log4j.Priority;
 
-public abstract class Filter extends ConfigBlock {
-  private static final Logger LOG = Logger.getLogger(Filter.class);
-
+public abstract class Filter extends ConfigItem {
+  protected FilterDescriptor filterDescriptor;
   protected Input input;
   private Filter nextFilter = null;
   private OutputManager outputManager;
 
   private Map<String, List<Mapper>> postFieldValueMappers = new HashMap<String, List<Mapper>>();
 
+  public void loadConfig(FilterDescriptor filterDescriptor) {
+    this.filterDescriptor = filterDescriptor;
+  }
+
+  public FilterDescriptor getFilterDescriptor() {
+    return filterDescriptor;
+  }
+
   @Override
   public void init() throws Exception {
     super.init();
@@ -55,28 +65,22 @@
     }
   }
 
-  @SuppressWarnings("unchecked")
   private void initializePostMapValues() {
-    Map<String, Object> postMapValues = (Map<String, Object>) getConfigValue("post_map_values");
+    Map<String, ? extends List<? extends PostMapValues>> postMapValues = filterDescriptor.getPostMapValues();
     if (postMapValues == null) {
       return;
     }
     for (String fieldName : postMapValues.keySet()) {
-      List<Map<String, Object>> mapList = null;
-      Object values = postMapValues.get(fieldName);
-      if (values instanceof List<?>) {
-        mapList = (List<Map<String, Object>>) values;
-      } else {
-        mapList = new ArrayList<Map<String, Object>>();
-        mapList.add((Map<String, Object>) values);
-      }
-      for (Map<String, Object> mapObject : mapList) {
-        for (String mapClassCode : mapObject.keySet()) {
+      List<? extends PostMapValues> values = postMapValues.get(fieldName);
+      for (PostMapValues pmv : values) {
+        for (MapFieldDescriptor mapFieldDescriptor : pmv.getMappers()) {
+          String mapClassCode = mapFieldDescriptor.getJsonName();
           Mapper mapper = (Mapper) AliasUtil.getClassInstance(mapClassCode, AliasType.MAPPER);
           if (mapper == null) {
-            break;
+            LOG.warn("Unknown mapper type: " + mapClassCode);
+            continue;
           }
-          if (mapper.init(getInput().getShortDescription(), fieldName, mapClassCode, mapObject.get(mapClassCode))) {
+          if (mapper.init(getInput().getShortDescription(), fieldName, mapClassCode, mapFieldDescriptor)) {
             List<Mapper> fieldMapList = postFieldValueMappers.get(fieldName);
             if (fieldMapList == null) {
               fieldMapList = new ArrayList<Mapper>();
@@ -156,15 +160,8 @@
   }
 
   @Override
-  public boolean isFieldConditionMatch(String fieldName, String stringValue) {
-    if (!super.isFieldConditionMatch(fieldName, stringValue)) {
-      if (input != null) {
-        return input.isFieldConditionMatch(fieldName, stringValue);
-      } else {
-        return false;
-      }
-    }
-    return true;
+  public boolean isEnabled() {
+    return BooleanUtils.isNotFalse(filterDescriptor.isEnabled());
   }
 
   @Override
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
index 7e2da70..70aea65 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
@@ -38,6 +38,8 @@
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterGrokDescriptor;
+import org.apache.commons.lang3.BooleanUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
@@ -75,11 +77,10 @@
     super.init();
 
     try {
-      messagePattern = escapePattern(getStringValue("message_pattern"));
-      multilinePattern = escapePattern(getStringValue("multiline_pattern"));
-      sourceField = getStringValue("source_field");
-      removeSourceField = getBooleanValue("remove_source_field",
-        removeSourceField);
+      messagePattern = escapePattern(((FilterGrokDescriptor)filterDescriptor).getMessagePattern());
+      multilinePattern = escapePattern(((FilterGrokDescriptor)filterDescriptor).getMultilinePattern());
+      sourceField = ((FilterGrokDescriptor)filterDescriptor).getSourceField();
+      removeSourceField = BooleanUtils.toBooleanDefaultIfNull(filterDescriptor.isRemoveSourceField(), removeSourceField);
 
       LOG.info("init() done. grokPattern=" + messagePattern + ", multilinePattern=" + multilinePattern + ", " +
       getShortDescription());
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
index 35f692e..cfccdeb 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
@@ -25,12 +25,9 @@
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.util.DateUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.log4j.Logger;
 
 public class FilterJSON extends Filter {
   
-  private static final Logger LOG  = Logger.getLogger(FilterJSON.class);
-
   @Override
   public void apply(String inputStr, InputMarker inputMarker) throws LogfeederException {
     Map<String, Object> jsonMap = null;
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
index b04a439..f2a4186 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
@@ -28,13 +28,11 @@
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterKeyValueDescriptor;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
 
 public class FilterKeyValue extends Filter {
-  private static final Logger LOG = Logger.getLogger(FilterKeyValue.class);
-
   private String sourceField = null;
   private String valueSplit = "=";
   private String fieldSplit = "\t";
@@ -46,10 +44,10 @@
   public void init() throws Exception {
     super.init();
 
-    sourceField = getStringValue("source_field");
-    valueSplit = getStringValue("value_split", valueSplit);
-    fieldSplit = getStringValue("field_split", fieldSplit);
-    valueBorders = getStringValue("value_borders");
+    sourceField = filterDescriptor.getSourceField();
+    valueSplit = StringUtils.defaultString(((FilterKeyValueDescriptor)filterDescriptor).getValueSplit(), valueSplit);
+    fieldSplit = StringUtils.defaultString(((FilterKeyValueDescriptor)filterDescriptor).getFieldSplit(), fieldSplit);
+    valueBorders = ((FilterKeyValueDescriptor)filterDescriptor).getValueBorders();
 
     LOG.info("init() done. source_field=" + sourceField + ", value_split=" + valueSplit + ", " + ", field_split=" +
         fieldSplit + ", " + getShortDescription());
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/AbstractInputFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/AbstractInputFile.java
index 41a1fa5..cfa1903 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/AbstractInputFile.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/AbstractInputFile.java
@@ -29,14 +29,14 @@
 import java.util.Map;
 
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileBaseDescriptor;
+import org.apache.commons.lang.BooleanUtils;
+import org.apache.commons.lang.ObjectUtils;
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
 
 public abstract class AbstractInputFile extends Input {
-  protected static final Logger LOG = Logger.getLogger(AbstractInputFile.class);
-
   private static final int DEFAULT_CHECKPOINT_INTERVAL_MS = 5 * 1000;
 
   protected File[] logFiles;
@@ -73,16 +73,16 @@
 
     // Let's close the file and set it to true after we start monitoring it
     setClosed(true);
-    logPath = getStringValue("path");
-    tail = getBooleanValue("tail", tail);
-    checkPointIntervalMS = getIntValue("checkpoint.interval.ms", DEFAULT_CHECKPOINT_INTERVAL_MS);
+    logPath = inputDescriptor.getPath();
+    tail = BooleanUtils.toBooleanDefaultIfNull(inputDescriptor.isTail(), tail);
+    checkPointIntervalMS = (int) ObjectUtils.defaultIfNull(((InputFileBaseDescriptor)inputDescriptor).getCheckpointIntervalMs(), DEFAULT_CHECKPOINT_INTERVAL_MS);
 
     if (StringUtils.isEmpty(logPath)) {
       LOG.error("path is empty for file input. " + getShortDescription());
       return;
     }
 
-    String startPosition = getStringValue("start_position");
+    String startPosition = inputDescriptor.getStartPosition();
     if (StringUtils.isEmpty(startPosition) || startPosition.equalsIgnoreCase("beginning") ||
         startPosition.equalsIgnoreCase("begining") || !tail) {
       isStartFromBegining = true;
@@ -313,7 +313,7 @@
 
   @Override
   public String getShortDescription() {
-    return "input:source=" + getStringValue("source") + ", path=" +
+    return "input:source=" + inputDescriptor.getSource() + ", path=" +
         (!ArrayUtils.isEmpty(logFiles) ? logFiles[0].getAbsolutePath() : logPath);
   }
 }
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
index 9f54d8a..fba596d 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
@@ -21,23 +21,25 @@
 
 import java.io.File;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.ambari.logfeeder.input.cache.LRUCache;
-import org.apache.ambari.logfeeder.common.ConfigBlock;
+import org.apache.ambari.logfeeder.common.ConfigItem;
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.output.Output;
 import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.log4j.Logger;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.Conditions;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.Fields;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor;
+import org.apache.commons.lang.BooleanUtils;
+import org.apache.log4j.Priority;
 
-public abstract class Input extends ConfigBlock implements Runnable {
-  private static final Logger LOG = Logger.getLogger(Input.class);
-
+public abstract class Input extends ConfigItem implements Runnable {
   private static final boolean DEFAULT_TAIL = true;
   private static final boolean DEFAULT_USE_EVENT_MD5 = false;
   private static final boolean DEFAULT_GEN_EVENT_MD5 = true;
@@ -47,12 +49,8 @@
   private static final long DEFAULT_CACHE_DEDUP_INTERVAL = 1000;
   private static final String DEFAULT_CACHE_KEY_FIELD = "log_message";
 
-  private static final String CACHE_ENABLED = "cache_enabled";
-  private static final String CACHE_KEY_FIELD = "cache_key_field";
-  private static final String CACHE_LAST_DEDUP_ENABLED = "cache_last_dedup_enabled";
-  private static final String CACHE_SIZE = "cache_size";
-  private static final String CACHE_DEDUP_INTERVAL = "cache_dedup_interval";
-
+  protected InputDescriptor inputDescriptor;
+  
   protected InputManager inputManager;
   protected OutputManager outputManager;
   private List<Output> outputList = new ArrayList<Output>();
@@ -75,21 +73,12 @@
     return null;
   }
   
-  @Override
-  public void loadConfig(Map<String, Object> map) {
-    super.loadConfig(map);
-    String typeValue = getStringValue("type");
-    if (typeValue != null) {
-      // Explicitly add type and value to field list
-      contextFields.put("type", typeValue);
-      @SuppressWarnings("unchecked")
-      Map<String, Object> addFields = (Map<String, Object>) map.get("add_fields");
-      if (addFields == null) {
-        addFields = new HashMap<String, Object>();
-        map.put("add_fields", addFields);
-      }
-      addFields.put("type", typeValue);
-    }
+  public void loadConfig(InputDescriptor inputDescriptor) {
+    this.inputDescriptor = inputDescriptor;
+  }
+
+  public InputDescriptor getInputDescriptor() {
+    return inputDescriptor;
   }
 
   public void setType(String type) {
@@ -104,6 +93,12 @@
     this.outputManager = outputManager;
   }
 
+  public boolean isFilterRequired(FilterDescriptor filterDescriptor) {
+    Conditions conditions = filterDescriptor.getConditions();
+    Fields fields = conditions.getFields();
+    return fields.getType().contains(inputDescriptor.getType());
+  }
+
   public void addFilter(Filter filter) {
     if (firstFilter == null) {
       firstFilter = filter;
@@ -116,6 +111,22 @@
     }
   }
 
+  @SuppressWarnings("unchecked")
+  public boolean isOutputRequired(Output output) {
+    Map<String, Object> conditions = (Map<String, Object>) output.getConfigs().get("conditions");
+    if (conditions == null) {
+      return false;
+    }
+    
+    Map<String, Object> fields = (Map<String, Object>) conditions.get("fields");
+    if (fields == null) {
+      return false;
+    }
+    
+    List<String> types = (List<String>) fields.get("rowtype");
+    return types.contains(inputDescriptor.getRowtype());
+  }
+
   public void addOutput(Output output) {
     outputList.add(output);
   }
@@ -124,9 +135,9 @@
   public void init() throws Exception {
     super.init();
     initCache();
-    tail = getBooleanValue("tail", DEFAULT_TAIL);
-    useEventMD5 = getBooleanValue("use_event_md5_as_id", DEFAULT_USE_EVENT_MD5);
-    genEventMD5 = getBooleanValue("gen_event_md5", DEFAULT_GEN_EVENT_MD5);
+    tail = BooleanUtils.toBooleanDefaultIfNull(inputDescriptor.isTail(), DEFAULT_TAIL);
+    useEventMD5 = BooleanUtils.toBooleanDefaultIfNull(inputDescriptor.isUseEventMd5AsId(), DEFAULT_USE_EVENT_MD5);
+    genEventMD5 = BooleanUtils.toBooleanDefaultIfNull(inputDescriptor.isGenEventMd5(), DEFAULT_GEN_EVENT_MD5);
 
     if (firstFilter != null) {
       firstFilter.init();
@@ -236,26 +247,26 @@
   }
 
   private void initCache() {
-    boolean cacheEnabled = getConfigValue(CACHE_ENABLED) != null
-      ? getBooleanValue(CACHE_ENABLED, DEFAULT_CACHE_ENABLED)
+    boolean cacheEnabled = inputDescriptor.isCacheEnabled() != null
+      ? inputDescriptor.isCacheEnabled()
       : LogFeederUtil.getBooleanProperty("logfeeder.cache.enabled", DEFAULT_CACHE_ENABLED);
     if (cacheEnabled) {
-      String cacheKeyField = getConfigValue(CACHE_KEY_FIELD) != null
-        ? getStringValue(CACHE_KEY_FIELD)
+      String cacheKeyField = inputDescriptor.getCacheKeyField() != null
+        ? inputDescriptor.getCacheKeyField()
         : LogFeederUtil.getStringProperty("logfeeder.cache.key.field", DEFAULT_CACHE_KEY_FIELD);
 
-      setCacheKeyField(getStringValue(cacheKeyField));
+      setCacheKeyField(cacheKeyField);
 
-      boolean cacheLastDedupEnabled = getConfigValue(CACHE_LAST_DEDUP_ENABLED) != null
-        ? getBooleanValue(CACHE_LAST_DEDUP_ENABLED, DEFAULT_CACHE_DEDUP_LAST)
+      boolean cacheLastDedupEnabled = inputDescriptor.getCacheLastDedupEnabled() != null
+        ? inputDescriptor.getCacheLastDedupEnabled()
         : LogFeederUtil.getBooleanProperty("logfeeder.cache.last.dedup.enabled", DEFAULT_CACHE_DEDUP_LAST);
 
-      int cacheSize = getConfigValue(CACHE_SIZE) != null
-        ? getIntValue(CACHE_SIZE, DEFAULT_CACHE_SIZE)
+      int cacheSize = inputDescriptor.getCacheSize() != null
+        ? inputDescriptor.getCacheSize()
         : LogFeederUtil.getIntProperty("logfeeder.cache.size", DEFAULT_CACHE_SIZE);
 
-      long cacheDedupInterval = getConfigValue(CACHE_DEDUP_INTERVAL) != null
-        ? getLongValue(CACHE_DEDUP_INTERVAL, DEFAULT_CACHE_DEDUP_INTERVAL)
+      long cacheDedupInterval = inputDescriptor.getCacheDedupInterval() != null
+        ? inputDescriptor.getCacheDedupInterval()
         : Long.parseLong(LogFeederUtil.getStringProperty("logfeeder.cache.dedup.interval", String.valueOf(DEFAULT_CACHE_DEDUP_INTERVAL)));
 
       setCache(new LRUCache(cacheSize, filePath, cacheDedupInterval, cacheLastDedupEnabled));
@@ -319,6 +330,11 @@
   }
 
   @Override
+  public boolean isEnabled() {
+    return BooleanUtils.isNotFalse(inputDescriptor.isEnabled());
+  }
+
+  @Override
   public String getNameForThread() {
     if (filePath != null) {
       try {
@@ -331,7 +347,17 @@
   }
 
   @Override
+  public boolean logConfigs(Priority level) {
+    if (!super.logConfigs(level)) {
+      return false;
+    }
+    LOG.log(level, "Printing Input=" + getShortDescription());
+    LOG.log(level, "description=" + inputDescriptor.getPath());
+    return true;
+  }
+
+  @Override
   public String toString() {
     return getShortDescription();
   }
-}
+}
\ No newline at end of file
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputConfigUploader.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputConfigUploader.java
new file mode 100644
index 0000000..8aec690
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputConfigUploader.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.input;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.nio.charset.Charset;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.log4j.Logger;
+
+import com.google.common.io.Files;
+
+public class InputConfigUploader extends Thread {
+  protected static final Logger LOG = Logger.getLogger(InputConfigUploader.class);
+
+  private static final long SLEEP_BETWEEN_CHECK = 2000;
+
+  private final File configDir;
+  private final FilenameFilter inputConfigFileFilter = new FilenameFilter() {
+    @Override
+    public boolean accept(File dir, String name) {
+      return name.startsWith("input.config-") && name.endsWith(".json");
+    }
+  };
+  private final Set<String> filesHandled = new HashSet<>();
+  private final Pattern serviceNamePattern = Pattern.compile("input.config-(.+).json");
+  private final LogSearchConfig config;
+  private final String clusterName = LogFeederUtil.getStringProperty("cluster.name");
+  
+  public static void load(LogSearchConfig config) {
+    new InputConfigUploader(config).start();
+  }
+  
+  private InputConfigUploader(LogSearchConfig config) {
+    super("Input Config Loader");
+    setDaemon(true);
+    
+    this.configDir = new File(LogFeederUtil.getStringProperty("logfeeder.config.dir"));
+    this.config = config;
+  }
+  
+  @Override
+  public void run() {
+    while (true) {
+      File[] inputConfigFiles = configDir.listFiles(inputConfigFileFilter);
+      for (File inputConfigFile : inputConfigFiles) {
+        if (!filesHandled.contains(inputConfigFile.getAbsolutePath())) {
+          try {
+            Matcher m = serviceNamePattern.matcher(inputConfigFile.getName());
+            m.find();
+            String serviceName = m.group(1);
+            String inputConfig = Files.toString(inputConfigFile, Charset.defaultCharset());
+            
+            if (!config.inputConfigExists(clusterName, serviceName)) {
+              config.createInputConfig(clusterName, serviceName, inputConfig);
+            }
+            filesHandled.add(inputConfigFile.getAbsolutePath());
+          } catch (Exception e) {
+            LOG.warn("Error handling file " + inputConfigFile.getAbsolutePath(), e);
+          }
+        }
+      }
+      
+      try {
+        Thread.sleep(SLEEP_BETWEEN_CHECK);
+      } catch (InterruptedException e) {
+        LOG.debug("Interrupted during sleep", e);
+      }
+    }
+  }
+}
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
index 3737839..fc40ca4 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
@@ -25,7 +25,9 @@
 
 import org.apache.ambari.logfeeder.input.reader.LogsearchReaderFactory;
 import org.apache.ambari.logfeeder.util.FileUtil;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileDescriptor;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
+import org.apache.commons.lang.BooleanUtils;
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.solr.common.util.Base64;
 
@@ -62,7 +64,7 @@
 
   @Override
   void start() throws Exception {
-    boolean isProcessFile = getBooleanValue("process_file", true);
+    boolean isProcessFile = BooleanUtils.toBooleanDefaultIfNull(((InputFileDescriptor)inputDescriptor).getProcessFile(), true);
     if (isProcessFile) {
       if (tail) {
         processFile(logFiles[0]);
@@ -100,7 +102,7 @@
   }
 
   private void copyFiles(File[] files) {
-    boolean isCopyFile = getBooleanValue("copy_file", false);
+    boolean isCopyFile = BooleanUtils.toBooleanDefaultIfNull(((InputFileDescriptor)inputDescriptor).getCopyFile(), false);
     if (isCopyFile && files != null) {
       for (File file : files) {
         try {
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
index 8e70850..8c76785 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
@@ -25,6 +25,7 @@
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -46,101 +47,163 @@
   private static final String CHECKPOINT_SUBFOLDER_NAME = "logfeeder_checkpoints";
   public static final String DEFAULT_CHECKPOINT_EXTENSION = ".cp";
   
-  private List<Input> inputList = new ArrayList<Input>();
+  private Map<String, List<Input>> inputs = new HashMap<>();
   private Set<Input> notReadyList = new HashSet<Input>();
 
   private boolean isDrain = false;
-  private boolean isAnyInputTail = false;
-
-  private File checkPointFolderFile = null;
-
-  private MetricData filesCountMetric = new MetricData("input.files.count", true);
 
   private String checkPointExtension;
-  
-  private Thread inputIsReadyMonitor = null;
+  private File checkPointFolderFile;
 
-  public List<Input> getInputList() {
-    return inputList;
+  private MetricData filesCountMetric = new MetricData("input.files.count", true);
+  
+  private Thread inputIsReadyMonitor;
+
+  public List<Input> getInputList(String serviceName) {
+    return inputs.get(serviceName);
   }
 
-  public void add(Input input) {
+  public void add(String serviceName, Input input) {
+    List<Input> inputList = inputs.get(serviceName);
+    if (inputList == null) {
+      inputList = new ArrayList<>();
+      inputs.put(serviceName, inputList);
+    }
     inputList.add(input);
   }
 
+  public void removeInputsForService(String serviceName) {
+    List<Input> inputList = inputs.get(serviceName);
+    for (Input input : inputList) {
+      input.setDrain(true);
+    }
+    inputList.clear();
+    inputs.remove(serviceName);
+  }
+
   public void removeInput(Input input) {
     LOG.info("Trying to remove from inputList. " + input.getShortDescription());
-    Iterator<Input> iter = inputList.iterator();
-    while (iter.hasNext()) {
-      Input iterInput = iter.next();
-      if (iterInput.equals(input)) {
-        LOG.info("Removing Input from inputList. " + input.getShortDescription());
-        iter.remove();
+    for (List<Input> inputList : inputs.values()) {
+      Iterator<Input> iter = inputList.iterator();
+      while (iter.hasNext()) {
+        Input iterInput = iter.next();
+        if (iterInput.equals(input)) {
+          LOG.info("Removing Input from inputList. " + input.getShortDescription());
+          iter.remove();
+        }
       }
     }
   }
 
   private int getActiveFilesCount() {
     int count = 0;
-    for (Input input : inputList) {
-      if (input.isReady()) {
-        count++;
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        if (input.isReady()) {
+          count++;
+        }
       }
     }
     return count;
   }
 
   public void init() {
+    initCheckPointSettings();
+    startMonitorThread();
+  }
+  
+  private void initCheckPointSettings() {
     checkPointExtension = LogFeederUtil.getStringProperty("logfeeder.checkpoint.extension", DEFAULT_CHECKPOINT_EXTENSION);
-    for (Input input : inputList) {
+    LOG.info("Determining valid checkpoint folder");
+    boolean isCheckPointFolderValid = false;
+    // We need to keep track of the files we are reading.
+    String checkPointFolder = LogFeederUtil.getStringProperty("logfeeder.checkpoint.folder");
+    if (!StringUtils.isEmpty(checkPointFolder)) {
+      checkPointFolderFile = new File(checkPointFolder);
+      isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+    }
+    if (!isCheckPointFolderValid) {
+      // Let's try home folder
+      String userHome = LogFeederUtil.getStringProperty("user.home");
+      if (userHome != null) {
+        checkPointFolderFile = new File(userHome, CHECKPOINT_SUBFOLDER_NAME);
+        LOG.info("Checking if home folder can be used for checkpoints. Folder=" + checkPointFolderFile);
+        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+      }
+    }
+    if (!isCheckPointFolderValid) {
+      // Let's use tmp folder
+      String tmpFolder = LogFeederUtil.getStringProperty("java.io.tmpdir");
+      if (tmpFolder == null) {
+        tmpFolder = "/tmp";
+      }
+      checkPointFolderFile = new File(tmpFolder, CHECKPOINT_SUBFOLDER_NAME);
+      LOG.info("Checking if tmps folder can be used for checkpoints. Folder=" + checkPointFolderFile);
+      isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+      if (isCheckPointFolderValid) {
+        LOG.warn("Using tmp folder " + checkPointFolderFile + " to store check points. This is not recommended." +
+            "Please set logfeeder.checkpoint.folder property");
+      }
+    }
+    
+    if (isCheckPointFolderValid) {
+      LOG.info("Using folder " + checkPointFolderFile + " for storing checkpoints");
+    }
+  }
+
+  private void startMonitorThread() {
+    inputIsReadyMonitor = new Thread("InputIsReadyMonitor") {
+      @Override
+      public void run() {
+        LOG.info("Going to monitor for these missing files: " + notReadyList.toString());
+        while (true) {
+          if (isDrain) {
+            LOG.info("Exiting missing file monitor.");
+            break;
+          }
+          try {
+            Iterator<Input> iter = notReadyList.iterator();
+            while (iter.hasNext()) {
+              Input input = iter.next();
+              try {
+                if (input.isReady()) {
+                  input.monitor();
+                  iter.remove();
+                }
+              } catch (Throwable t) {
+                LOG.error("Error while enabling monitoring for input. " + input.getShortDescription());
+              }
+            }
+            Thread.sleep(30 * 1000);
+          } catch (Throwable t) {
+            // Ignore
+          }
+        }
+      }
+    };
+    
+    inputIsReadyMonitor.start();
+  }
+  
+  public void startInputs(String serviceName) {
+    for (Input input : inputs.get(serviceName)) {
       try {
         input.init();
-        if (input.isTail()) {
-          isAnyInputTail = true;
+        if (input.isReady()) {
+          input.monitor();
+        } else {
+          if (input.isTail()) {
+            LOG.info("Adding input to not ready list. Note, it is possible this component is not run on this host. " +
+                "So it might not be an issue. " + input.getShortDescription());
+            notReadyList.add(input);
+          } else {
+            LOG.info("Input is not ready, so going to ignore it " + input.getShortDescription());
+          }
         }
       } catch (Exception e) {
         LOG.error("Error initializing input. " + input.getShortDescription(), e);
       }
     }
-
-    if (isAnyInputTail) {
-      LOG.info("Determining valid checkpoint folder");
-      boolean isCheckPointFolderValid = false;
-      // We need to keep track of the files we are reading.
-      String checkPointFolder = LogFeederUtil.getStringProperty("logfeeder.checkpoint.folder");
-      if (!StringUtils.isEmpty(checkPointFolder)) {
-        checkPointFolderFile = new File(checkPointFolder);
-        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-      }
-      if (!isCheckPointFolderValid) {
-        // Let's try home folder
-        String userHome = LogFeederUtil.getStringProperty("user.home");
-        if (userHome != null) {
-          checkPointFolderFile = new File(userHome, CHECKPOINT_SUBFOLDER_NAME);
-          LOG.info("Checking if home folder can be used for checkpoints. Folder=" + checkPointFolderFile);
-          isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-        }
-      }
-      if (!isCheckPointFolderValid) {
-        // Let's use tmp folder
-        String tmpFolder = LogFeederUtil.getStringProperty("java.io.tmpdir");
-        if (tmpFolder == null) {
-          tmpFolder = "/tmp";
-        }
-        checkPointFolderFile = new File(tmpFolder, CHECKPOINT_SUBFOLDER_NAME);
-        LOG.info("Checking if tmps folder can be used for checkpoints. Folder=" + checkPointFolderFile);
-        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-        if (isCheckPointFolderValid) {
-          LOG.warn("Using tmp folder " + checkPointFolderFile + " to store check points. This is not recommended." +
-              "Please set logfeeder.checkpoint.folder property");
-        }
-      }
-
-      if (isCheckPointFolderValid) {
-        LOG.info("Using folder " + checkPointFolderFile + " for storing checkpoints");
-      }
-    }
-
   }
 
   private boolean verifyCheckPointFolder(File folderPathFile) {
@@ -171,70 +234,25 @@
     return checkPointFolderFile;
   }
 
-  public void monitor() {
-    for (Input input : inputList) {
-      if (input.isReady()) {
-        input.monitor();
-      } else {
-        if (input.isTail()) {
-          LOG.info("Adding input to not ready list. Note, it is possible this component is not run on this host. " +
-              "So it might not be an issue. " + input.getShortDescription());
-          notReadyList.add(input);
-        } else {
-          LOG.info("Input is not ready, so going to ignore it " + input.getShortDescription());
-        }
-      }
-    }
-    // Start the monitoring thread if any file is in tail mode
-    if (isAnyInputTail) {
-       inputIsReadyMonitor = new Thread("InputIsReadyMonitor") {
-        @Override
-        public void run() {
-          LOG.info("Going to monitor for these missing files: " + notReadyList.toString());
-          while (true) {
-            if (isDrain) {
-              LOG.info("Exiting missing file monitor.");
-              break;
-            }
-            try {
-              Iterator<Input> iter = notReadyList.iterator();
-              while (iter.hasNext()) {
-                Input input = iter.next();
-                try {
-                  if (input.isReady()) {
-                    input.monitor();
-                    iter.remove();
-                  }
-                } catch (Throwable t) {
-                  LOG.error("Error while enabling monitoring for input. " + input.getShortDescription());
-                }
-              }
-              Thread.sleep(30 * 1000);
-            } catch (Throwable t) {
-              // Ignore
-            }
-          }
-        }
-      };
-      inputIsReadyMonitor.start();
-    }
-  }
-
   void addToNotReady(Input notReadyInput) {
     notReadyList.add(notReadyInput);
   }
 
   public void addMetricsContainers(List<MetricData> metricsList) {
-    for (Input input : inputList) {
-      input.addMetricsContainers(metricsList);
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        input.addMetricsContainers(metricsList);
+      }
     }
     filesCountMetric.value = getActiveFilesCount();
     metricsList.add(filesCountMetric);
   }
 
   public void logStats() {
-    for (Input input : inputList) {
-      input.logStat();
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        input.logStat();
+      }
     }
 
     filesCountMetric.value = getActiveFilesCount();
@@ -308,14 +326,16 @@
 
   public void waitOnAllInputs() {
     //wait on inputs
-    for (Input input : inputList) {
-      if (input != null) {
-        Thread inputThread = input.getThread();
-        if (inputThread != null) {
-          try {
-            inputThread.join();
-          } catch (InterruptedException e) {
-            // ignore
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        if (input != null) {
+          Thread inputThread = input.getThread();
+          if (inputThread != null) {
+            try {
+              inputThread.join();
+            } catch (InterruptedException e) {
+              // ignore
+            }
           }
         }
       }
@@ -332,17 +352,21 @@
   }
 
   public void checkInAll() {
-    for (Input input : inputList) {
-      input.lastCheckIn();
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        input.lastCheckIn();
+      }
     }
   }
 
   public void close() {
-    for (Input input : inputList) {
-      try {
-        input.setDrain(true);
-      } catch (Throwable t) {
-        LOG.error("Error while draining. input=" + input.getShortDescription(), t);
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        try {
+          input.setDrain(true);
+        } catch (Throwable t) {
+          LOG.error("Error while draining. input=" + input.getShortDescription(), t);
+        }
       }
     }
     isDrain = true;
@@ -352,14 +376,16 @@
     int waitTimeMS = 1000;
     for (int i = 0; i < iterations; i++) {
       boolean allClosed = true;
-      for (Input input : inputList) {
-        if (!input.isClosed()) {
-          try {
-            allClosed = false;
-            LOG.warn("Waiting for input to close. " + input.getShortDescription() + ", " + (iterations - i) + " more seconds");
-            Thread.sleep(waitTimeMS);
-          } catch (Throwable t) {
-            // Ignore
+      for (List<Input> inputList : inputs.values()) {
+        for (Input input : inputList) {
+          if (!input.isClosed()) {
+            try {
+              allClosed = false;
+              LOG.warn("Waiting for input to close. " + input.getShortDescription() + ", " + (iterations - i) + " more seconds");
+              Thread.sleep(waitTimeMS);
+            } catch (Throwable t) {
+              // Ignore
+            }
           }
         }
       }
@@ -370,9 +396,11 @@
     }
     
     LOG.warn("Some inputs were not closed after " + iterations + " iterations");
-    for (Input input : inputList) {
-      if (!input.isClosed()) {
-        LOG.warn("Input not closed. Will ignore it." + input.getShortDescription());
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        if (!input.isClosed()) {
+          LOG.warn("Input not closed. Will ignore it." + input.getShortDescription());
+        }
       }
     }
   }
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
index f560379..4bf162b 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
@@ -23,6 +23,7 @@
 import java.io.IOException;
 
 import org.apache.ambari.logfeeder.util.S3Util;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputS3FileDescriptor;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.solr.common.util.Base64;
 
@@ -78,8 +79,8 @@
 
   @Override
   protected BufferedReader openLogFile(File logPathFile) throws IOException {
-    String s3AccessKey = getStringValue("s3_access_key");
-    String s3SecretKey = getStringValue("s3_secret_key");
+    String s3AccessKey = ((InputS3FileDescriptor)inputDescriptor).getS3AccessKey();
+    String s3SecretKey = ((InputS3FileDescriptor)inputDescriptor).getS3SecretKey();
     BufferedReader br = S3Util.getReader(logPathFile.getPath(), s3AccessKey, s3SecretKey);
     fileKey = getFileKey(logPathFile);
     base64FileKey = Base64.byteArrayToBase64(fileKey.toString().getBytes());
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
index 2222f93..5e7bdb3 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
@@ -21,7 +21,6 @@
 import java.net.InetAddress;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
@@ -35,22 +34,23 @@
 import org.apache.ambari.logfeeder.filter.FilterJSON;
 import org.apache.ambari.logfeeder.output.Output;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.log4j.Logger;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.FilterJsonDescriptorImpl;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputDescriptorImpl;
+import org.apache.commons.collections.MapUtils;
 import org.apache.solr.common.util.Base64;
 
 import com.google.common.base.Joiner;
 
 public class InputSimulate extends Input {
-  private static final Logger LOG = Logger.getLogger(InputSimulate.class);
-
   private static final String LOG_TEXT_PATTERN = "{ logtime=\"%d\", level=\"%s\", log_message=\"%s\", host=\"%s\"}";
   
   private static final Map<String, String> typeToFilePath = new HashMap<>();
-  public static void loadTypeToFilePath(List<Map<String, Object>> inputList) {
-    for (Map<String, Object> input : inputList) {
-      if (input.containsKey("type") && input.containsKey("path")) {
-        typeToFilePath.put((String)input.get("type"), (String)input.get("path"));
-      }
+  private static final List<String> inputTypes = new ArrayList<>();
+  public static void loadTypeToFilePath(List<InputDescriptor> inputList) {
+    for (InputDescriptor input : inputList) {
+      typeToFilePath.put(input.getType(), input.getPath());
+      inputTypes.add(input.getType());
     }
   }
   
@@ -83,20 +83,16 @@
     this.host = "#" + hostNumber.incrementAndGet() + "-" + LogFeederUtil.hostName;
     
     Filter filter = new FilterJSON();
-    filter.loadConfig(Collections.<String, Object> emptyMap());
+    filter.loadConfig(new FilterJsonDescriptorImpl());
     filter.setInput(this);
     addFilter(filter);
   }
   
   private List<String> getSimulatedLogTypes() {
     String logsToSimulate = LogFeederUtil.getStringProperty("logfeeder.simulate.log_ids");
-    if (logsToSimulate == null) {
-      return new ArrayList<>(typeToFilePath.keySet());
-    } else {
-      List<String> simulatedLogTypes = Arrays.asList(logsToSimulate.split(","));
-      simulatedLogTypes.retainAll(typeToFilePath.keySet());
-      return simulatedLogTypes;
-    }
+    return (logsToSimulate == null) ?
+      inputTypes :
+      Arrays.asList(logsToSimulate.split(","));
   }
 
   @Override
@@ -120,11 +116,12 @@
 
   @Override
   void start() throws Exception {
-    if (types.isEmpty())
-      return;
-    
     getFirstFilter().setOutputManager(outputManager);
     while (true) {
+      if (types.isEmpty()) {
+        try { Thread.sleep(sleepMillis); } catch(Exception e) { /* Ignore */ }
+        continue;
+      }
       String type = imitateRandomLogFile();
       
       String line = getLine();
@@ -139,9 +136,9 @@
   private String imitateRandomLogFile() {
     int typePos = random.nextInt(types.size());
     String type = types.get(typePos);
-    String filePath = typeToFilePath.get(type);
+    String filePath = MapUtils.getString(typeToFilePath, type, "path of " + type);
     
-    configs.put("type", type);
+    ((InputDescriptorImpl)inputDescriptor).setType(type);
     setFilePath(filePath);
     
     return type;
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FilterLogData.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FilterLogData.java
deleted file mode 100644
index a05a916..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FilterLogData.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.logconfig;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.ambari.logfeeder.common.LogFeederConstants;
-import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.collections.MapUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.log4j.Logger;
-
-/**
- * Read configuration from solr and filter the log
- */
-public enum FilterLogData {
-  INSTANCE;
-  
-  private static final Logger LOG = Logger.getLogger(FilterLogData.class);
-  
-  private static final boolean DEFAULT_VALUE = true;
-
-  public boolean isAllowed(String jsonBlock, InputMarker inputMarker) {
-    if (StringUtils.isEmpty(jsonBlock)) {