AMBARI-17822: Create AMBARI_INFRA service definition [move SOLR out of Log Search] (jluniya)
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index 42ecf6a..6537e43 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -86,7 +86,7 @@
   RANGER_KERBEROS_SUPPORT = "ranger_kerberos_support"
   HIVE_METASTORE_SITE_SUPPORT = "hive_metastore_site_support"
   RANGER_USERSYNC_PASSWORD_JCEKS = "ranger_usersync_password_jceks"
-  RANGER_INSTALL_LOGSEARCH_CLIENT = "ranger_install_logsearch_client"
+  RANGER_INSTALL_INFRA_CLIENT = "ranger_install_infra_client"
   HBASE_HOME_DIRECTORY = "hbase_home_directory"
   ATLAS_RANGER_PLUGIN_SUPPORT = "atlas_ranger_plugin_support"
   ATLAS_UPGRADE_SUPPORT = "atlas_upgrade_support"
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py b/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py
index 35758fa..472b8d9 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py
@@ -58,13 +58,13 @@
   config = Script.get_config()
   return _has_applicable_local_component(config, ["METRICS_GRAFANA"])
 
-def should_install_logsearch_solr():
+def should_install_infra_solr():
   config = Script.get_config()
-  return _has_applicable_local_component(config, ["LOGSEARCH_SOLR"])
+  return _has_applicable_local_component(config, ["INFRA_SOLR"])
 
-def should_install_logsearch_solr_client():
+def should_install_infra_solr_client():
   config = Script.get_config()
-  return _has_applicable_local_component(config, ['LOGSEARCH_SOLR_CLIENT', 'ATLAS_SERVER', 'RANGER_ADMIN'])
+  return _has_applicable_local_component(config, ['INFRA_SOLR_CLIENT', 'ATLAS_SERVER', 'RANGER_ADMIN'])
 
 def should_install_logsearch_portal():
   config = Script.get_config()
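
As context for the renames above, a hedged sketch of how these helpers are consumed: the <condition> string attached to a package in a service metainfo.xml (see the AMBARI_INFRA metainfo.xml later in this patch) is resolved by name against this module, so "should_install_infra_solr" must match the function name exactly. The lookup below only illustrates that contract; it is not the actual Ambari installer code.

    # Illustrative sketch: resolve a metainfo.xml <condition> string such as
    # "should_install_infra_solr" to the same-named helper in package_conditions
    # and evaluate it on the current host.
    import resource_management.libraries.functions.package_conditions as package_conditions

    def package_condition_holds(condition_name):
      chooser = getattr(package_conditions, condition_name)  # e.g. should_install_infra_solr
      return chooser()  # True if the conditional package should be installed on this host
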
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py b/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
index 82db1bb..ccf91ef 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
@@ -17,17 +17,18 @@
 
 """
 import random
+from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.core.resources.system import Directory, Execute, File
-from resource_management.core.shell import as_user
 from resource_management.core.source import StaticFile, InlineTemplate
 
 __all__ = ["upload_configuration_to_zk", "create_collection", "setup_kerberos", "set_cluster_prop",
            "setup_kerberos_plugin", "create_znode", "check_znode", "create_sasl_users"]
 
 def __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, separated_znode=False):
-  solr_cli_prefix = format('export JAVA_HOME={java64_home} ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh ' \
+  sudo = AMBARI_SUDO_BINARY
+  solr_cli_prefix = format('{sudo} JAVA_HOME={java64_home} /usr/lib/ambari-infra-solr-client/solrCloudCli.sh ' \
                            '--zookeeper-connect-string {zookeeper_quorum}')
   if separated_znode:
     solr_cli_prefix+=format(' --znode {solr_znode}')
@@ -41,51 +42,43 @@
         command+= " %s %s" % (key, value)
   return command
 
-
 def upload_configuration_to_zk(zookeeper_quorum, solr_znode, config_set, config_set_dir, tmp_dir,
-                         java64_home, user, retry = 5, interval = 10, solrconfig_content = None, jaas_file=None):
+                         java64_home, retry = 5, interval = 10, solrconfig_content = None, jaas_file=None):
   """
   Upload configuration set to zookeeper with solrCloudCli.sh
   At first, it tries to download configuration set if exists into a temporary location, then upload that one to
-  zookeeper. (if the configuration changed there, in that case the user wont redefine it)
-  If the configuration set does not exist in zookeeper then upload it based on the config_set_dir parameter.
+  zookeeper. If the configuration set does not exist in zookeeper then upload it based on the config_set_dir parameter.
   """
   random_num = random.random()
   tmp_config_set_dir = format('{tmp_dir}/solr_config_{config_set}_{random_num}')
   solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home)
   Execute(format('{solr_cli_prefix} --download-config --config-dir {tmp_config_set_dir} --config-set {config_set} --retry {retry} --interval {interval}'),
-          only_if=as_user(format("{solr_cli_prefix} --check-config --config-set {config_set} --retry {retry} --interval {interval}"), user),
-          user=user
-          )
+          only_if=format("{solr_cli_prefix} --check-config --config-set {config_set} --retry {retry} --interval {interval}"))
   appendableDict = {}
   appendableDict["--jaas-file"] = jaas_file
 
   if solrconfig_content is not None:
       File(format("{tmp_config_set_dir}/solrconfig.xml"),
        content=solrconfig_content,
-       owner=user,
        only_if=format("test -d {tmp_config_set_dir}")
       )
       upload_tmp_config_cmd = format('{solr_cli_prefix} --upload-config --config-dir {tmp_config_set_dir} --config-set {config_set} --retry {retry} --interval {interval}')
       upload_tmp_config_cmd = __append_flags_if_exists(upload_tmp_config_cmd, appendableDict)
       Execute(upload_tmp_config_cmd,
-        user=user,
         only_if=format("test -d {tmp_config_set_dir}")
       )
   upload_config_cmd = format('{solr_cli_prefix} --upload-config --config-dir {config_set_dir} --config-set {config_set} --retry {retry} --interval {interval}')
   upload_config_cmd = __append_flags_if_exists(upload_config_cmd, appendableDict)
   Execute(upload_config_cmd,
-    user=user,
     not_if=format("test -d {tmp_config_set_dir}")
   )
 
   Directory(tmp_config_set_dir,
               action="delete",
-              owner=user,
               create_parents=True
             )
 
-def create_collection(zookeeper_quorum, solr_znode, collection, config_set, java64_home, user,
+def create_collection(zookeeper_quorum, solr_znode, collection, config_set, java64_home,
                       shards = 1, replication_factor = 1, max_shards = 1, retry = 5, interval = 10,
                       router_name = None, router_field = None, jaas_file = None, key_store_location = None,
                       key_store_password = None, key_store_type = None, trust_store_location = None,
@@ -120,9 +113,9 @@
   create_collection_cmd = __append_flags_if_exists(create_collection_cmd, appendableDict)
   create_collection_cmd = format(create_collection_cmd, key_store_password_param=key_store_password, trust_store_password_param=trust_store_password)
 
-  Execute(create_collection_cmd, user=user)
+  Execute(create_collection_cmd)
 
-def setup_kerberos(zookeeper_quorum, solr_znode, copy_from_znode, java64_home, user, secure=False, jaas_file=None):
+def setup_kerberos(zookeeper_quorum, solr_znode, copy_from_znode, java64_home, secure=False, jaas_file=None):
   """
   Copy all unsecured (or secured) Znode content to a secured (or unsecured) Znode,
   and restrict the world permissions there.
@@ -131,25 +124,25 @@
   setup_kerberos_cmd = format('{solr_cli_prefix} --setup-kerberos --copy-from-znode {copy_from_znode}')
   if secure and jaas_file is not None:
     setup_kerberos_cmd+=format(' --secure --jaas-file {jaas_file}')
-  Execute(setup_kerberos_cmd, user=user)
+  Execute(setup_kerberos_cmd)
 
-def check_znode(zookeeper_quorum, solr_znode, java64_home, user, retry = 5, interval = 10):
+def check_znode(zookeeper_quorum, solr_znode, java64_home, retry = 5, interval = 10):
   """
   Check znode exists or not, throws exception if does not accessible.
   """
   solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, True)
   check_znode_cmd = format('{solr_cli_prefix} --check-znode --retry {retry} --interval {interval}')
-  Execute(check_znode_cmd, user=user)
+  Execute(check_znode_cmd)
 
-def create_znode(zookeeper_quorum, solr_znode, java64_home, user, retry = 5 , interval = 10):
+def create_znode(zookeeper_quorum, solr_znode, java64_home, retry = 5 , interval = 10):
   """
   Create znode if does not exists, throws exception if zookeeper is not accessible.
   """
   solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, True)
   create_znode_cmd = format('{solr_cli_prefix} --create-znode --retry {retry} --interval {interval}')
-  Execute(create_znode_cmd, user=user)
+  Execute(create_znode_cmd)
 
-def setup_kerberos_plugin(zookeeper_quorum, solr_znode, java64_home, user, secure=False, jaas_file = None):
+def setup_kerberos_plugin(zookeeper_quorum, solr_znode, java64_home, secure=False, jaas_file = None):
   """
   Set Kerberos plugin on the Solr znode in security.json, if secure is False, then clear the security.json
   """
@@ -157,9 +150,9 @@
   setup_kerberos_plugin_cmd = format('{solr_cli_prefix} --setup-kerberos-plugin')
   if secure and jaas_file is not None:
     setup_kerberos_plugin_cmd+=format(' --jaas-file {jaas_file} --secure')
-  Execute(setup_kerberos_plugin_cmd, user=user)
+  Execute(setup_kerberos_plugin_cmd)
 
-def set_cluster_prop(zookeeper_quorum, solr_znode, prop_name, prop_value, java64_home, user = None, jaas_file = None):
+def set_cluster_prop(zookeeper_quorum, solr_znode, prop_name, prop_value, java64_home, jaas_file = None):
   """
   Set a cluster property on the Solr znode in clusterprops.json
   """
@@ -167,65 +160,51 @@
   set_cluster_prop_cmd = format('{solr_cli_prefix} --cluster-prop --property-name {prop_name} --property-value {prop_value}')
   if jaas_file is not None:
     set_cluster_prop_cmd+=format(' --jaas-file {jaas_file}')
-  Execute(set_cluster_prop_cmd, user=user)
+  Execute(set_cluster_prop_cmd)
 
-def create_sasl_users(zookeeper_quorum, solr_znode, jaas_file, java64_home, user, sasl_users=[]):
+def create_sasl_users(zookeeper_quorum, solr_znode, jaas_file, java64_home, sasl_users=[]):
   """
   Add list of sasl users to a znode
   """
   solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, True)
   sasl_users_str = ",".join(str(x) for x in sasl_users)
   create_sasl_users_cmd = format('{solr_cli_prefix} --create-sasl-users --jaas-file {jaas_file} --sasl-users {sasl_users_str}')
-  Execute(create_sasl_users_cmd, user=user)
+  Execute(create_sasl_users_cmd)
 
-def setup_solr_client(config, user = None, group = None, custom_log4j = True, custom_log_location = None, log4jcontent = None):
-    solr_user = config['configurations']['logsearch-solr-env']['logsearch_solr_user'] if user is None else user
-    solr_group = config['configurations']['cluster-env']['user_group'] if group is None else group
-    solr_client_dir = '/usr/lib/ambari-logsearch-solr-client'
-    solr_client_log_dir = default('/configurations/logsearch-solr-env/logsearch_solr_client_log_dir', '/var/log/ambari-logsearch-solr-client') if custom_log_location is None else custom_log_location
+def setup_solr_client(config, custom_log4j = True, custom_log_location = None, log4jcontent = None):
+    solr_client_dir = '/usr/lib/ambari-infra-solr-client'
+    solr_client_log_dir = default('/configurations/infra-solr-env/infra_solr_client_log_dir', '/var/log/ambari-infra-solr-client') if custom_log_location is None else custom_log_location
     solr_client_log = format("{solr_client_log_dir}/solr-client.log")
 
     Directory(solr_client_log_dir,
                 mode=0755,
                 cd_access='a',
-                owner=solr_user,
-                group=solr_group,
                 create_parents=True
                 )
     Directory(solr_client_dir,
                 mode=0755,
                 cd_access='a',
-                owner=solr_user,
-                group=solr_group,
                 create_parents=True,
                 recursive_ownership=True
                 )
     solrCliFilename = format("{solr_client_dir}/solrCloudCli.sh")
     File(solrCliFilename,
          mode=0755,
-         owner=solr_user,
-         group=solr_group,
          content=StaticFile(solrCliFilename)
          )
     if custom_log4j:
-      # use custom log4j content only, when logsearch is not installed on the cluster
-      solr_client_log4j_content = config['configurations']['logsearch-solr-client-log4j']['content'] if log4jcontent is None else log4jcontent
+      # use custom log4j content only when infra is not installed on the cluster
+      solr_client_log4j_content = config['configurations']['infra-solr-client-log4j']['content'] if log4jcontent is None else log4jcontent
       File(format("{solr_client_dir}/log4j.properties"),
              content=InlineTemplate(solr_client_log4j_content),
-             owner=solr_user,
-             group=solr_group,
              mode=0644
              )
     else:
         File(format("{solr_client_dir}/log4j.properties"),
-             owner=solr_user,
-             group=solr_group,
              mode=0644
              )
 
     File(solr_client_log,
          mode=0664,
-         owner=solr_user,
-         group=solr_group,
          content=''
          )
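
Note on the refactor above: the user=... arguments were dropped because every solrCloudCli.sh invocation is now wrapped by the Ambari sudo binary rather than executed as the Solr user via as_user. A minimal sketch, with illustrative hosts and paths, of the command prefix the new __create_solr_cloud_cli_prefix builds:

    # Minimal illustration, not part of the patch; AMBARI_SUDO_BINARY is the same
    # constant imported at the top of solr_cloud_util.py in this change.
    from ambari_commons.constants import AMBARI_SUDO_BINARY

    def example_solr_cli_prefix(zookeeper_quorum, java64_home):
      # e.g. "<ambari-sudo> JAVA_HOME=/usr/jdk64/jdk1.8.0 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401:2181"
      return ('%s JAVA_HOME=%s /usr/lib/ambari-infra-solr-client/solrCloudCli.sh '
              '--zookeeper-connect-string %s') % (AMBARI_SUDO_BINARY, java64_home, zookeeper_quorum)
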
diff --git a/ambari-logsearch/ambari-logsearch-assembly/pom.xml b/ambari-logsearch/ambari-logsearch-assembly/pom.xml
index e0f6a09..b3b3ca0 100644
--- a/ambari-logsearch/ambari-logsearch-assembly/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-assembly/pom.xml
@@ -29,9 +29,9 @@
   <properties>
     <mapping.base.path>/usr/lib</mapping.base.path>
     <solr.tar>http://archive.apache.org/dist/lucene/solr/${solr.version}/solr-${solr.version}.tgz</solr.tar>
-    <solr.mapping.path>${mapping.base.path}/ambari-logsearch-solr</solr.mapping.path>
-    <solr.package.name>ambari-logsearch-solr</solr.package.name>
-    <solr.client.package.name>ambari-logsearch-solr-client</solr.client.package.name>
+    <solr.mapping.path>${mapping.base.path}/ambari-infra-solr</solr.mapping.path>
+    <solr.package.name>ambari-infra-solr</solr.package.name>
+    <solr.client.package.name>ambari-infra-solr-client</solr.client.package.name>
     <solr.client.mapping.path>${mapping.base.path}/${solr.client.package.name}</solr.client.mapping.path>
     <solr.client.dir>${project.basedir}/../ambari-logsearch-solr-client</solr.client.dir>
     <logsearch.portal.package.name>ambari-logsearch-portal</logsearch.portal.package.name>
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
index 5f6a91c..c644cd5 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
@@ -136,7 +136,7 @@
     TimelineMetric metric = new TimelineMetric();
     metric.setMetricName(metricName);
     metric.setHostName(solrHost);
-    metric.setAppId("logsearch-solr");
+    metric.setAppId("infra-solr");
     metric.setStartTime(currMS);
     metric.setType(type);
     metric.setTimestamp(currMS);
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/HadoopServiceConfig.json b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/HadoopServiceConfig.json
index d8c36e5..819af25 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/HadoopServiceConfig.json
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/HadoopServiceConfig.json
@@ -175,6 +175,16 @@
         }
       ]
     },
+    "infra" : {
+      "label" : "Infra",
+      "components": [
+        {
+          "name": "infra_solr"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
     "kafka": {
       "label": "Kafka",
       "components": [
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/Role.java b/ambari-server/src/main/java/org/apache/ambari/server/Role.java
index a1a425c..1fceb29 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/Role.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/Role.java
@@ -120,7 +120,7 @@
   public static final Role NIMBUS = valueOf("NIMBUS");
   public static final Role RANGER_KMS_SERVER = valueOf("RANGER_KMS_SERVER");
   public static final Role LOGSEARCH_SERVER = valueOf("LOGSEARCH_SERVER");
-  public static final Role LOGSEARCH_SOLR = valueOf("LOGSEARCH_SOLR");
+  public static final Role INFRA_SOLR = valueOf("INFRA_SOLR");
   public static final Role LOGSEARCH_LOGFEEDER = valueOf("LOGSEARCH_LOGFEEDER");
   public static final Role INSTALL_PACKAGES = valueOf("install_packages");
   public static final Role UPDATE_REPO = valueOf("update_repo");
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/alerts.json b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/alerts.json
new file mode 100644
index 0000000..cf85d9f
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/alerts.json
@@ -0,0 +1,37 @@
+{
+  "AMBARI_INFRA": {
+    "INFRA_SOLR": [
+      {
+        "name": "infra_solr",
+        "label": "Infra Solr Web UI",
+        "description": "This host-level alert is triggered if the Solr Cloud Instance is unreachable.",
+        "interval": 1,
+        "scope": "ANY",
+        "source": {
+          "type": "WEB",
+          "uri": {
+            "http": "{{infra-solr-env/infra_solr_port}}",
+            "https": "{{infra-solr-env/infra_solr_port}}",
+            "https_property": "{{infra-solr-env/infra_solr_ssl_enabled}}",
+            "https_property_value": "true",
+            "connection_timeout": 5.0,
+            "kerberos_keytab": "{{infra-solr-env/infra_solr_web_kerberos_keytab}}",
+            "kerberos_principal": "{{infra-solr-env/infra_solr_web_kerberos_principal}}",
+            "default_port": 8886
+          },
+          "reporting": {
+            "ok": {
+              "text": "HTTP {0} response in {2:.3f}s"
+            },
+            "warning": {
+              "text": "HTTP {0} response from {1} in {2:.3f}s ({3})"
+            },
+            "critical": {
+              "text": "Connection failed to {1} ({3})"
+            }
+          }
+        }
+      }
+    ]
+  }
+}
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-client-log4j.xml b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-client-log4j.xml
similarity index 79%
rename from ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-client-log4j.xml
rename to ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-client-log4j.xml
index d1f7e9e..f597492 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-client-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-client-log4j.xml
@@ -23,9 +23,19 @@
 <configuration supports_adding_forbidden="true">
   <!-- log4j.xml -->
   <property>
+    <name>infra_solr_client_log_dir</name>
+    <value>/var/log/ambari-infra-solr-client</value>
+    <description>Directory for Solr client logs</description>
+    <display-name>Infra Solr Client log dir</display-name>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
     <name>content</name>
     <display-name>log4j template</display-name>
-    <description>This is the jinja template for log4j.properties file for logsearch solr client</description>
+    <description>This is the jinja template for the log4j.properties file for the Infra Solr client</description>
     <value/>
     <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
     <value-attributes>
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-env.xml b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-env.xml
new file mode 100644
index 0000000..dfbaac9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-env.xml
@@ -0,0 +1,235 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<!-- This is a special config file for properties used to monitor status of the service -->
+<configuration supports_adding_forbidden="true">
+  <property>
+    <name>infra_solr_port</name>
+    <value>8886</value>
+    <description>Solr port</description>
+    <display-name>Infra Solr port</display-name>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_jmx_port</name>
+    <value>18886</value>
+    <description>Solr JMX port</description>
+    <display-name>Infra Solr JMX port</display-name>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_pid_dir</name>
+    <value>/var/run/ambari-infra-solr</value>
+    <description>Solr Process ID Directory</description>
+    <display-name>Infra Solr pid dir</display-name>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_log_dir</name>
+    <value>/var/log/ambari-infra-solr</value>
+    <description>Directory for Solr logs</description>
+    <display-name>Infra Solr log dir</display-name>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_user</name>
+    <value>infra-solr</value>
+    <property-type>USER</property-type>
+    <description>Solr user</description>
+    <display-name>Infra Solr User</display-name>
+    <value-attributes>
+      <type>user</type>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_datadir</name>
+    <value>/opt/ambari_infra_solr/data</value>
+    <display-name>Infra Solr data dir</display-name>
+    <description>Directory for storing the Solr index. Make sure you have enough disk space.</description>
+    <value-attributes>
+      <type>directory</type>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_ssl_enabled</name>
+    <value>false</value>
+    <display-name>Enable SSL to Infra Solr</display-name>
+    <description>Enable SSL for Solr</description>
+    <value-attributes>
+      <type>boolean</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_truststore_location</name>
+    <value>/etc/security/serverKeys/infra.solr.trustStore.jks</value>
+    <display-name>Infra Solr trust store location</display-name>
+    <description>Location of the trust store file. (The default file is not generated.)</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_truststore_type</name>
+    <value>jks</value>
+    <display-name>Infra Solr trust store type</display-name>
+    <description>Type of the trust store file.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_truststore_password</name>
+    <value>bigdata</value>
+    <property-type>PASSWORD</property-type>
+    <display-name>Infra Solr trust store password</display-name>
+    <description>Password to open the trust store file.</description>
+    <value-attributes>
+      <type>password</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_keystore_location</name>
+    <value>/etc/security/serverKeys/infra.solr.keyStore.jks</value>
+    <display-name>Infra Solr key store location</display-name>
+    <description>Location of the key store file. (The default file is not generated.)</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_keystore_type</name>
+    <value>jks</value>
+    <display-name>Infra Solr key store type</display-name>
+    <description>Type of the key store file.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_keystore_password</name>
+    <value>bigdata</value>
+    <display-name>Infra Solr key store password</display-name>
+    <property-type>PASSWORD</property-type>
+    <description>Password to open the key store file.</description>
+    <value-attributes>
+      <type>password</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_znode</name>
+    <value>/infra-solr</value>
+    <description>ZooKeeper znode, e.g. /ambari-solr</description>
+    <display-name>Infra Solr ZNode</display-name>
+    <value-attributes>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_minmem</name>
+    <value>1024</value>
+    <display-name>Infra Solr Minimum Heap Size</display-name>
+    <description>Solr minimum heap size e.g. 512m</description>
+    <value-attributes>
+      <type>int</type>
+      <minimum>512</minimum>
+      <maximum>5120</maximum>
+      <unit>MB</unit>
+      <increment-step>256</increment-step>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_maxmem</name>
+    <value>2048</value>
+    <display-name>Infra Solr Maximum Heap Size</display-name>
+    <description>Solr maximum heap size e.g. 512m</description>
+    <value-attributes>
+      <type>int</type>
+      <minimum>512</minimum>
+      <maximum>5120</maximum>
+      <unit>MB</unit>
+      <increment-step>256</increment-step>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>infra_solr_kerberos_keytab</name>
+    <value>/etc/security/keytabs/infra_solr.service.keytab</value>
+    <display-name>Infra Solr keytab</display-name>
+    <description>The path to the Kerberos keytab file containing the service principal for Infra Solr.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>infra_solr_kerberos_principal</name>
+    <value>infra-solr</value>
+    <display-name>Infra Solr principal</display-name>
+    <description>The service principal for Infra Solr.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>infra_solr_web_kerberos_keytab</name>
+    <value>/etc/security/keytabs/spnego.service.keytab</value>
+    <display-name>Infra Solr Http keytab</display-name>
+    <description>The path to the Kerberos keytab file containing the HTTP/SPNEGO service principal for the Infra Solr web UI.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>infra_solr_web_kerberos_principal</name>
+    <value>HTTP/_HOST@EXAMPLE.COM</value>
+    <display-name>Infra Solr Http principal</display-name>
+    <description>The HTTP/SPNEGO service principal for the Infra Solr web UI.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>infra_solr_kerberos_name_rules</name>
+    <value>DEFAULT</value>
+    <description>Kerberos name rules for SPNEGO</description>
+    <value-attributes>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <!-- infra-solr-env.sh -->
+  <property>
+    <name>content</name>
+    <display-name>infra-solr-env template</display-name>
+    <description>This is the jinja template for infra-solr-env.sh file</description>
+    <value/>
+    <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
+    <value-attributes>
+      <property-file-name>infra-solr-env.sh.j2</property-file-name>
+      <property-file-type>text</property-file-type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-log4j.xml b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-log4j.xml
similarity index 95%
rename from ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-log4j.xml
rename to ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-log4j.xml
index be846bd..b192a28 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-log4j.xml
@@ -22,7 +22,7 @@
 <configuration supports_adding_forbidden="true">
   <property>
     <name>content</name>
-    <display-name>logsearch-solr-log4j template</display-name>
+    <display-name>infra-solr-log4j template</display-name>
     <description>This is the jinja template for log4j.properties</description>
     <value/>
     <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-xml.xml b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-xml.xml
similarity index 88%
rename from ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-xml.xml
rename to ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-xml.xml
index 7a5bb5a..1d9d8ef 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-xml.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-xml.xml
@@ -22,8 +22,8 @@
 <configuration supports_adding_forbidden="true">
   <property>
     <name>content</name>
-    <display-name>logsearch-solr-xml template</display-name>
-    <description>This is the jinja template for logsearch solr.xml file</description>
+    <display-name>infra-solr-xml template</display-name>
+    <description>This is the jinja template for the Ambari Infra solr.xml file</description>
     <value/>
     <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
     <value-attributes>
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/kerberos.json b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/kerberos.json
new file mode 100644
index 0000000..94b1b14
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/kerberos.json
@@ -0,0 +1,51 @@
+{
+  "services": [
+    {
+      "name": "AMBARI_INFRA",
+      "identities": [
+        {
+          "name": "/smokeuser"
+        },
+        {
+          "name": "/spnego",
+          "principal": {
+            "configuration": "infra-solr-env/infra_solr_web_kerberos_principal"
+          },
+          "keytab": {
+            "configuration": "infra-solr-env/infra_solr_web_kerberos_keytab"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "INFRA_SOLR",
+          "identities": [
+            {
+              "name": "infra-solr",
+              "principal": {
+                "value": "infra-solr/_HOST@${realm}",
+                "type": "service",
+                "configuration": "infra-solr-env/infra_solr_kerberos_principal"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/ambari-infra-solr.service.keytab",
+                "owner": {
+                  "name": "${infra-solr-env/infra_solr_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "infra-solr-env/infra_solr_kerberos_keytab"
+              }
+            }
+          ]
+        },
+        {
+          "name": "INFRA_SOLR_CLIENT"
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/metainfo.xml b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/metainfo.xml
new file mode 100644
index 0000000..a1cee49
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/metainfo.xml
@@ -0,0 +1,143 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>AMBARI_INFRA</name>
+      <displayName>Ambari Infra</displayName>
+      <comment>Core shared service used by Ambari-managed components.</comment>
+      <version>0.1.0</version>
+      <components>
+        <component>
+          <name>INFRA_SOLR</name>
+          <timelineAppid>infra-solr</timelineAppid>
+          <displayName>Infra Solr Instance</displayName>
+          <category>MASTER</category>
+          <cardinality>1+</cardinality>
+          <versionAdvertised>false</versionAdvertised>
+          <commandScript>
+            <script>scripts/infra_solr.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <logs>
+            <log>
+              <logId>infra_solr</logId>
+              <primary>true</primary>
+            </log>
+          </logs>
+          <dependencies>
+            <dependency>
+              <name>AMBARI_INFRA/INFRA_SOLR_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+
+          </dependencies>
+          <configuration-dependencies>
+            <config-type>infra-solr-env</config-type>
+            <config-type>infra-solr-xml</config-type>
+            <config-type>infra-solr-log4j</config-type>
+          </configuration-dependencies>
+        </component>
+
+        <component>
+          <name>INFRA_SOLR_CLIENT</name>
+          <displayName>Infra Solr Client</displayName>
+          <category>CLIENT</category>
+          <cardinality>0+</cardinality>
+          <versionAdvertised>false</versionAdvertised>
+          <commandScript>
+            <script>scripts/infra_solr_client.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>env</type>
+              <fileName>log4j.properties</fileName>
+              <dictionaryName>infra-solr-client-log4j</dictionaryName>
+            </configFile>
+          </configFiles>
+          <configuration-dependencies>
+            <config-type>infra-solr-client-log4j</config-type>
+          </configuration-dependencies>
+        </component>
+
+      </components>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>
+          <packages>
+            <package>
+              <name>ambari-infra-solr-client</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+            <package>
+              <name>ambari-infra-solr</name>
+              <condition>should_install_infra_solr</condition>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>debian7,ubuntu12,ubuntu14,ubuntu16</osFamily>
+          <packages>
+            <package>
+              <name>ambari-infra-solr-client</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+            <package>
+              <name>ambari-infra-solr</name>
+              <condition>should_install_infra_solr</condition>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <requiredServices>
+        <service>ZOOKEEPER</service>
+      </requiredServices>
+
+      <themes>
+        <theme>
+          <fileName>theme.json</fileName>
+          <default>true</default>
+        </theme>
+      </themes>
+
+    </service>
+  </services>
+</metainfo>
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/infra_solr.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/infra_solr.py
new file mode 100644
index 0000000..532be47
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/infra_solr.py
@@ -0,0 +1,110 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.core.resources.system import Execute, File
+from resource_management.core.logger import Logger
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.functions.get_user_call_output import get_user_call_output
+from resource_management.libraries.functions.show_logs import show_logs
+from setup_infra_solr import setup_infra_solr
+import sys
+
+class InfraSolr(Script):
+  def install(self, env):
+    import params
+    env.set_params(params)
+    self.install_packages(env)
+
+  def configure(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    setup_infra_solr(name = 'server')
+
+  def start(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+    self.configure(env)
+
+    Execute(
+      format('{solr_bindir}/solr start -cloud -noprompt -s {infra_solr_datadir} >> {infra_solr_log} 2>&1'),
+      environment={'SOLR_INCLUDE': format('{infra_solr_conf}/infra-solr-env.sh')},
+      user=params.infra_solr_user
+    )
+
+  def stop(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    try:
+      Execute(format('{solr_bindir}/solr stop -all >> {infra_solr_log}'),
+              environment={'SOLR_INCLUDE': format('{infra_solr_conf}/infra-solr-env.sh')},
+              user=params.infra_solr_user,
+              only_if=format("test -f {infra_solr_pidfile}")
+              )
+
+      File(params.infra_solr_pidfile,
+           action="delete"
+           )
+    except:
+      Logger.warning("Could not stop solr:" + str(sys.exc_info()[1]) + "\n Trying to kill it")
+      self.kill_process(params.infra_solr_pidfile, params.infra_solr_user, params.infra_solr_log_dir)
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+
+    check_process_status(status_params.infra_solr_pidfile)
+
+  def kill_process(self, pid_file, user, log_dir):
+    """
+    Kill the process identified by the pid file, then check whether it has stopped. If the process is still running
+    after the kill command, retry with -9 (hard kill).
+    """
+    import params
+    pid = get_user_call_output(format("cat {pid_file}"), user=user, is_checked_call=False)[1]
+    process_id_exists_command = format("ls {pid_file} >/dev/null 2>&1 && ps -p {pid} >/dev/null 2>&1")
+
+    kill_cmd = format("{sudo} kill {pid}")
+    Execute(kill_cmd,
+          not_if=format("! ({process_id_exists_command})"))
+    wait_time = 5
+
+    hard_kill_cmd = format("{sudo} kill -9 {pid}")
+    Execute(hard_kill_cmd,
+          not_if=format("! ({process_id_exists_command}) || ( sleep {wait_time} && ! ({process_id_exists_command}) )"),
+          ignore_failures=True)
+    try:
+      Execute(format("! ({process_id_exists_command})"),
+            tries=20,
+            try_sleep=3,
+            )
+    except:
+      show_logs(log_dir, user)
+      raise
+
+    File(pid_file,
+       action="delete"
+       )
+
+
+if __name__ == "__main__":
+  InfraSolr().execute()
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch_solr_client.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/infra_solr_client.py
similarity index 89%
rename from ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch_solr_client.py
rename to ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/infra_solr_client.py
index 0bbbd3c..37ffcf4 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch_solr_client.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/infra_solr_client.py
@@ -19,9 +19,9 @@
 
 from resource_management.core.exceptions import ClientComponentHasNoStatus
 from resource_management.libraries.script.script import Script
-from setup_logsearch_solr import setup_logsearch_solr
+from setup_infra_solr import setup_infra_solr
 
-class LogsearchSolrClient(Script):
+class InfraSolrClient(Script):
 
   def install(self, env):
     import params
@@ -32,7 +32,7 @@
   def configure(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    setup_logsearch_solr(name = 'client')
+    setup_infra_solr(name='client')
 
   def start(self, env, upgrade_type=None):
     import params
@@ -48,4 +48,4 @@
 
 
 if __name__ == "__main__":
-  LogsearchSolrClient().execute()
\ No newline at end of file
+  InfraSolrClient().execute()
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py
new file mode 100644
index 0000000..898a35c
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py
@@ -0,0 +1,114 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from ambari_commons.constants import AMBARI_SUDO_BINARY
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.is_empty import is_empty
+from resource_management.libraries.script.script import Script
+import os
+import status_params
+
+def get_port_from_url(address):
+  if not is_empty(address):
+    return address.split(':')[-1]
+  else:
+    return address
+
+# config object that holds the configurations declared in the -site.xml file
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+stack_version = default("/commandParams/version", None)
+sudo = AMBARI_SUDO_BINARY
+security_enabled = status_params.security_enabled
+
+infra_solr_conf = "/etc/ambari-infra-solr/conf"
+
+infra_solr_port = status_params.infra_solr_port
+infra_solr_piddir = status_params.infra_solr_piddir
+infra_solr_pidfile = status_params.infra_solr_pidfile
+
+user_group = config['configurations']['cluster-env']['user_group']
+fetch_nonlocal_groups = config['configurations']['cluster-env']["fetch_nonlocal_groups"]
+
+# shared configs
+java64_home = config['hostLevelParams']['java_home']
+zookeeper_hosts_list = config['clusterHostInfo']['zookeeper_hosts']
+zookeeper_hosts_list.sort()
+# get comma separated list of zookeeper hosts from clusterHostInfo
+zookeeper_hosts = ",".join(zookeeper_hosts_list)
+
+#####################################
+# Solr configs
+#####################################
+
+# Only supporting SolrCloud mode - so hardcode those options
+solr_cloudmode = 'true'
+solr_dir = '/usr/lib/ambari-infra-solr'
+solr_client_dir = '/usr/lib/ambari-infra-solr-client'
+solr_bindir = solr_dir + '/bin'
+cloud_scripts = solr_dir + '/server/scripts/cloud-scripts'
+
+if "infra-solr-env" in config['configurations']:
+  infra_solr_znode = config['configurations']['infra-solr-env']['infra_solr_znode']
+  infra_solr_min_mem = format(config['configurations']['infra-solr-env']['infra_solr_minmem'])
+  infra_solr_max_mem = format(config['configurations']['infra-solr-env']['infra_solr_maxmem'])
+  infra_solr_instance_count = len(config['clusterHostInfo']['infra_solr_hosts'])
+  infra_solr_datadir = format(config['configurations']['infra-solr-env']['infra_solr_datadir'])
+  infra_solr_data_resources_dir = os.path.join(infra_solr_datadir, 'resources')
+  infra_solr_jmx_port = config['configurations']['infra-solr-env']['infra_solr_jmx_port']
+  infra_solr_ssl_enabled = default('configurations/infra-solr-env/infra_solr_ssl_enabled', False)
+  infra_solr_keystore_location = config['configurations']['infra-solr-env']['infra_solr_keystore_location']
+  infra_solr_keystore_password = config['configurations']['infra-solr-env']['infra_solr_keystore_password']
+  infra_solr_keystore_type = config['configurations']['infra-solr-env']['infra_solr_keystore_type']
+  infra_solr_truststore_location = config['configurations']['infra-solr-env']['infra_solr_truststore_location']
+  infra_solr_truststore_password = config['configurations']['infra-solr-env']['infra_solr_truststore_password']
+  infra_solr_truststore_type = config['configurations']['infra-solr-env']['infra_solr_truststore_type']
+  infra_solr_user = config['configurations']['infra-solr-env']['infra_solr_user']
+  infra_solr_log_dir = config['configurations']['infra-solr-env']['infra_solr_log_dir']
+  infra_solr_log = format("{infra_solr_log_dir}/solr-install.log")
+  solr_env_content = config['configurations']['infra-solr-env']['content']
+
+zookeeper_port = default('/configurations/zoo.cfg/clientPort', None)
+# get comma separated list of zookeeper hosts from clusterHostInfo
+index = 0
+zookeeper_quorum = ""
+for host in config['clusterHostInfo']['zookeeper_hosts']:
+  zookeeper_quorum += host + ":" + str(zookeeper_port)
+  index += 1
+  if index < len(config['clusterHostInfo']['zookeeper_hosts']):
+    zookeeper_quorum += ","
+
+
+if security_enabled:
+  kinit_path_local = status_params.kinit_path_local
+  _hostname_lowercase = config['hostname'].lower()
+  infra_solr_jaas_file = infra_solr_conf + '/infra_solr_jaas.conf'
+  infra_solr_kerberos_keytab = config['configurations']['infra-solr-env']['infra_solr_kerberos_keytab']
+  infra_solr_kerberos_principal = config['configurations']['infra-solr-env']['infra_solr_kerberos_principal'].replace('_HOST',_hostname_lowercase)
+  infra_solr_web_kerberos_keytab = config['configurations']['infra-solr-env']['infra_solr_web_kerberos_keytab']
+  infra_solr_web_kerberos_principal = config['configurations']['infra-solr-env']['infra_solr_web_kerberos_principal'].replace('_HOST',_hostname_lowercase)
+  infra_solr_kerberos_name_rules = config['configurations']['infra-solr-env']['infra_solr_kerberos_name_rules']
+
+solr_xml_content = default('configurations/infra-solr-xml/content', None)
+solr_log4j_content = default('configurations/infra-solr-log4j/content', None)
+
+solr_client_custom_log4j = "infra-solr-client-log4j" in config['configurations']
+
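A side note on the zookeeper_quorum loop in params.py above: the index-based concatenation simply produces a comma-separated list of host:port pairs. Purely as an illustration (the patch keeps the explicit loop), the same string can be built with a join:

    # Illustrative only; assumes zookeeper_port has already been resolved
    # (it defaults to None above, as in the patch).
    def build_zookeeper_quorum(zookeeper_hosts, zookeeper_port):
      # ["c6401.ambari.apache.org", "c6402.ambari.apache.org"], 2181
      #   -> "c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181"
      return ",".join("%s:%s" % (host, zookeeper_port) for host in zookeeper_hosts)
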
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/service_check.py
new file mode 100644
index 0000000..a5a94fc
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/service_check.py
@@ -0,0 +1,30 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.script.script import Script
+from resource_management.core.logger import Logger
+
+class InfraServiceCheck(Script):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+    Logger.info('Infra Service Check')
+
+if __name__ == "__main__":
+  InfraServiceCheck().execute()
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
new file mode 100644
index 0000000..41cb504
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
@@ -0,0 +1,115 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.core.exceptions import Fail
+from resource_management.core.source import InlineTemplate, Template
+from resource_management.core.resources.system import Directory, File
+from resource_management.libraries.functions.decorator import retry
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions import solr_cloud_util
+
+def setup_infra_solr(name = None):
+  import params
+
+  if name == 'server':
+    Directory([params.infra_solr_log_dir, params.infra_solr_piddir,
+               params.infra_solr_datadir, params.infra_solr_data_resources_dir],
+              mode=0755,
+              cd_access='a',
+              create_parents=True,
+              owner=params.infra_solr_user,
+              group=params.user_group
+              )
+
+    Directory([params.solr_dir, params.infra_solr_conf],
+              mode=0755,
+              cd_access='a',
+              owner=params.infra_solr_user,
+              group=params.user_group,
+              create_parents=True,
+              recursive_ownership=True
+              )
+
+    File(params.infra_solr_log,
+         mode=0644,
+         owner=params.infra_solr_user,
+         group=params.user_group,
+         content=''
+         )
+
+    File(format("{infra_solr_conf}/infra-solr-env.sh"),
+         content=InlineTemplate(params.solr_env_content),
+         mode=0755,
+         owner=params.infra_solr_user,
+         group=params.user_group
+         )
+
+    File(format("{infra_solr_datadir}/solr.xml"),
+         content=InlineTemplate(params.solr_xml_content),
+         owner=params.infra_solr_user,
+         group=params.user_group
+         )
+
+    File(format("{infra_solr_conf}/log4j.properties"),
+         content=InlineTemplate(params.solr_log4j_content),
+         owner=params.infra_solr_user,
+         group=params.user_group
+         )
+
+    jaas_file = params.infra_solr_jaas_file if params.security_enabled else None
+    url_scheme = 'https' if params.infra_solr_ssl_enabled else 'http'
+
+    create_ambari_solr_znode()
+
+    if params.security_enabled:
+      File(format("{infra_solr_jaas_file}"),
+           content=Template("infra_solr_jaas.conf.j2"),
+           owner=params.infra_solr_user)
+
+    solr_cloud_util.set_cluster_prop(
+      zookeeper_quorum=params.zookeeper_quorum,
+      solr_znode=params.infra_solr_znode,
+      java64_home=params.java64_home,
+      prop_name="urlScheme",
+      prop_value=url_scheme,
+      jaas_file=jaas_file
+    )
+
+    solr_cloud_util.setup_kerberos_plugin(
+      zookeeper_quorum=params.zookeeper_quorum,
+      solr_znode=params.infra_solr_znode,
+      jaas_file=jaas_file,
+      java64_home=params.java64_home,
+      secure=params.security_enabled
+    )
+
+
+  elif name == 'client':
+    solr_cloud_util.setup_solr_client(params.config, custom_log4j=params.solr_client_custom_log4j)
+
+  else:
+    raise Fail('Neither client nor server was selected to install.')
+
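+# Creating the znode can fail while ZooKeeper is still starting up, so retry
+# up to 30 times with a 5 second sleep between attempts.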
+@retry(times=30, sleep_time=5, err_class=Fail)
+def create_ambari_solr_znode():
+  import params
+  solr_cloud_util.create_znode(
+    zookeeper_quorum=params.zookeeper_quorum,
+    solr_znode=params.infra_solr_znode,
+    java64_home=params.java64_home)
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/status_params.py
new file mode 100644
index 0000000..f51a321
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/status_params.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.script.script import Script
+
+config = Script.get_config()
+
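+# Pid file of the Infra Solr process; the STATUS command checks this file to
+# decide whether the component is running.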
+infra_solr_port = default('configurations/infra-solr-env/infra_solr_port', '8886')
+infra_solr_piddir = default('configurations/infra-solr-env/infra_solr_pid_dir', '/var/run/ambari-infra-solr')
+infra_solr_pidfile = format("{infra_solr_piddir}/solr-{infra_solr_port}.pid")
+
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch_solr_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/infra_solr_jaas.conf.j2
similarity index 89%
rename from ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch_solr_jaas.conf.j2
rename to ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/infra_solr_jaas.conf.j2
index 0adb025..8f8d711 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch_solr_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/infra_solr_jaas.conf.j2
@@ -21,6 +21,6 @@
  useKeyTab=true
  storeKey=true
  useTicketCache=false
- keyTab="{{logsearch_solr_kerberos_keytab}}"
- principal="{{logsearch_solr_kerberos_principal}}";
+ keyTab="{{infra_solr_kerberos_keytab}}"
+ principal="{{infra_solr_kerberos_principal}}";
 };
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-solr-env.sh.j2 b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-env.sh.j2
similarity index 83%
rename from ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-solr-env.sh.j2
rename to ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-env.sh.j2
index 607c833..d1c1ef9 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-solr-env.sh.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-env.sh.j2
@@ -20,7 +20,7 @@
 SOLR_JAVA_HOME={{java64_home}}
 
 # Increase Java Min/Max Heap as needed to support your indexing / query needs
-SOLR_JAVA_MEM="-Xms{{logsearch_solr_min_mem}}m -Xmx{{logsearch_solr_max_mem}}m"
+SOLR_JAVA_MEM="-Xms{{infra_solr_min_mem}}m -Xmx{{infra_solr_max_mem}}m"
 
 # Enable verbose GC logging
 GC_LOG_OPTS="-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \
@@ -45,7 +45,7 @@
 # Set the ZooKeeper connection string if using an external ZooKeeper ensemble
 # e.g. host1:2181,host2:2181/chroot
 # Leave empty if not using SolrCloud
-ZK_HOST="{{zookeeper_quorum}}{{logsearch_solr_znode}}"
+ZK_HOST="{{zookeeper_quorum}}{{infra_solr_znode}}"
 
 # Set the ZooKeeper client timeout (for SolrCloud mode)
 ZK_CLIENT_TIMEOUT="60000"
@@ -63,7 +63,7 @@
 ENABLE_REMOTE_JMX_OPTS="true"
 
 # The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here
-RMI_PORT={{logsearch_solr_jmx_port}}
+RMI_PORT={{infra_solr_jmx_port}}
 
 # Anything you add to the SOLR_OPTS variable will be included in the java
 # start command line as-is, in ADDITION to other options. If you specify the
@@ -74,30 +74,30 @@
 
 # Location where the bin/solr script will save PID files for running instances
 # If not set, the script will create PID files in $SOLR_TIP/bin
-SOLR_PID_DIR={{logsearch_solr_piddir}}
+SOLR_PID_DIR={{infra_solr_piddir}}
 
 # Path to a directory where Solr creates index files, the specified directory
 # must contain a solr.xml; by default, Solr will use server/solr
-SOLR_HOME={{logsearch_solr_datadir}}
+SOLR_HOME={{infra_solr_datadir}}
 
 # Solr provides a default Log4J configuration properties file in server/resources
 # however, you may want to customize the log settings and file appender location
 # so you can point the script to use a different log4j.properties file
-LOG4J_PROPS={{logsearch_solr_conf}}/log4j.properties
+LOG4J_PROPS={{infra_solr_conf}}/log4j.properties
 
 # Location where Solr should write logs to; should agree with the file appender
 # settings in server/resources/log4j.properties
-SOLR_LOGS_DIR={{logsearch_solr_log_dir}}
+SOLR_LOGS_DIR={{infra_solr_log_dir}}
 
 # Sets the port Solr binds to, default is 8983
-SOLR_PORT={{logsearch_solr_port}}
+SOLR_PORT={{infra_solr_port}}
 
 # Be sure to update the paths to the correct keystore for your environment
-{% if logsearch_solr_ssl_enabled %}
-SOLR_SSL_KEY_STORE={{logsearch_solr_keystore_location}}
-SOLR_SSL_KEY_STORE_PASSWORD={{logsearch_solr_keystore_password}}
-SOLR_SSL_TRUST_STORE={{logsearch_solr_keystore_location}}
-SOLR_SSL_TRUST_STORE_PASSWORD={{logsearch_solr_keystore_password}}
+{% if infra_solr_ssl_enabled %}
+SOLR_SSL_KEY_STORE={{infra_solr_keystore_location}}
+SOLR_SSL_KEY_STORE_PASSWORD={{infra_solr_keystore_password}}
+SOLR_SSL_TRUST_STORE={{infra_solr_keystore_location}}
+SOLR_SSL_TRUST_STORE_PASSWORD={{infra_solr_keystore_password}}
 SOLR_SSL_NEED_CLIENT_AUTH=false
 SOLR_SSL_WANT_CLIENT_AUTH=false
 {% endif %}
@@ -108,10 +108,10 @@
 
 {% if security_enabled -%}
 SOLR_HOST=`hostname -f`
-SOLR_JAAS_FILE={{logsearch_solr_jaas_file}}
-SOLR_KERB_KEYTAB={{logsearch_solr_web_kerberos_keytab}}
-SOLR_KERB_PRINCIPAL={{logsearch_solr_web_kerberos_principal}}
-SOLR_KERB_NAME_RULES={{logsearch_solr_kerberos_name_rules}}
+SOLR_JAAS_FILE={{infra_solr_jaas_file}}
+SOLR_KERB_KEYTAB={{infra_solr_web_kerberos_keytab}}
+SOLR_KERB_PRINCIPAL={{infra_solr_web_kerberos_principal}}
+SOLR_KERB_NAME_RULES={{infra_solr_kerberos_name_rules}}
 
 SOLR_AUTHENTICATION_CLIENT_CONFIGURER="org.apache.solr.client.solrj.impl.Krb5HttpClientConfigurer"
 SOLR_AUTHENTICATION_OPTS=" -DauthenticationPlugin=org.apache.solr.security.KerberosPlugin -Djava.security.auth.login.config=$SOLR_JAAS_FILE -Dsolr.kerberos.principal=${SOLR_KERB_PRINCIPAL} -Dsolr.kerberos.keytab=${SOLR_KERB_KEYTAB} -Dsolr.kerberos.cookie.domain=${SOLR_HOST} -Dsolr.kerberos.name.rules=${SOLR_KERB_NAME_RULES}"
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/solr-client-log4j.properties.j2 b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-client-log4j.properties.j2
similarity index 92%
rename from ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/solr-client-log4j.properties.j2
rename to ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-client-log4j.properties.j2
index 1d04a7c..e980b81 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/solr-client-log4j.properties.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-client-log4j.properties.j2
@@ -19,7 +19,7 @@
 log4j.rootLogger=INFO,file,stdout,stderr
 
 log4j.appender.file=org.apache.log4j.RollingFileAppender
-log4j.appender.file.File={{logsearch_solr_client_log|default('/var/log/ambari-logsearch-solr-client/solr-client.log')}}
+log4j.appender.file.File={{infra_client_log|default('/var/log/ambari-infra-solr-client/solr-client.log')}}
 log4j.appender.file.MaxFileSize=80MB
 log4j.appender.file.MaxBackupIndex=60
 log4j.appender.file.layout=org.apache.log4j.PatternLayout
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/solr-log4j.properties.j2 b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-log4j.properties.j2
similarity index 94%
rename from ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/solr-log4j.properties.j2
rename to ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-log4j.properties.j2
index 1284d58..275203a 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/solr-log4j.properties.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-log4j.properties.j2
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 #  Logging level
-solr.log={{logsearch_solr_log_dir}}
+infra.solr.log.dir={{infra_solr_log_dir}}
 #log4j.rootLogger=INFO, file, CONSOLE
 log4j.rootLogger=WARN, file
 
@@ -29,7 +29,7 @@
 log4j.appender.file.MaxBackupIndex=9
 
 #- File to log to and log format
-log4j.appender.file.File=${solr.log}/solr.log
+log4j.appender.file.File=${infra.solr.log.dir}/solr.log
 log4j.appender.file.layout=org.apache.log4j.PatternLayout
 log4j.appender.file.layout.ConversionPattern=%d{ISO8601} [%t] %-5p [%X{collection} %X{shard} %X{replica} %X{core}] %C (%F:%L) - %m%n
 
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/solr.xml.j2 b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr.xml.j2
similarity index 100%
rename from ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/solr.xml.j2
rename to ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr.xml.j2
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/themes/theme.json b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/themes/theme.json
new file mode 100644
index 0000000..3d16f9a
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/themes/theme.json
@@ -0,0 +1,107 @@
+{
+  "name": "default",
+  "description": "Default theme for AMBARI_INFRA service",
+  "configuration": {
+    "layouts": [
+      {
+        "name": "default",
+        "tabs": [
+          {
+            "name": "settings",
+            "display-name": "Settings",
+            "layout": {
+              "tab-columns": "2",
+              "tab-rows": "1",
+              "sections": [
+                {
+                  "name": "section-infra-solr",
+                  "display-name": "Ambari Infra Solr",
+                  "row-index": "0",
+                  "column-index": "0",
+                  "row-span": "1",
+                  "column-span": "1",
+                  "section-columns": "1",
+                  "section-rows": "4",
+                  "subsections": [
+                    {
+                      "name": "subsection-infra-solr-col1",
+                      "row-index": "0",
+                      "column-index": "0",
+                      "row-span": "1",
+                      "column-span": "1"
+                    },
+                    {
+                      "name": "subsection-infra-solr-col2",
+                      "row-index": "0",
+                      "column-index": "1",
+                      "row-span": "1",
+                      "column-span": "1"
+                    }
+                  ]
+                }
+              ]
+            }
+          }
+        ]
+      }
+    ],
+    "placement": {
+      "configuration-layout": "default",
+      "configs": [
+        {
+          "config": "infra-solr-env/infra_solr_minmem",
+          "subsection-name": "subsection-infra-solr-col1"
+        },
+        {
+          "config": "infra-solr-env/infra_solr_maxmem",
+          "subsection-name": "subsection-infra-solr-col1"
+        },
+        {
+          "config": "infra-solr-env/infra_solr_datadir",
+          "subsection-name": "subsection-infra-solr-col2"
+        },
+        {
+          "config": "infra-solr-env/infra_solr_znode",
+          "subsection-name": "subsection-infra-solr-col2"
+        }
+      ]
+    },
+    "widgets": [
+      {
+        "config": "infra-solr-env/infra_solr_minmem",
+        "widget": {
+          "type": "slider",
+          "units": [
+            {
+              "unit-name": "GB"
+            }
+          ]
+        }
+      },
+      {
+        "config": "infra-solr-env/infra_solr_maxmem",
+        "widget": {
+          "type": "slider",
+          "units": [
+            {
+              "unit-name": "GB"
+            }
+          ]
+        }
+      },
+      {
+        "config": "infra-solr-env/infra_solr_datadir",
+        "widget": {
+          "type": "directory"
+        }
+      },
+      {
+        "config": "infra-solr-env/infra_solr_znode",
+        "widget": {
+          "type": "directory"
+        }
+      }
+    ]
+  }
+}
+
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/metainfo.xml b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/metainfo.xml
index 9ff4090..446464c 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/metainfo.xml
@@ -91,8 +91,8 @@
               <name>atlas-metadata</name>
             </package>
             <package>
-              <name>ambari-logsearch-solr-client</name>
-              <condition>should_install_logsearch_solr_client</condition>
+              <name>ambari-infra-solr-client</name>
+              <condition>should_install_infra_solr_client</condition>
             </package>
           </packages>
         </osSpecific>
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata.py
index 8c57860..1a0e797 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata.py
@@ -107,7 +107,7 @@
          group=params.user_group
     )
 
-    if type == 'server' and params.search_backend_solr and params.has_logsearch_solr:
+    if type == 'server' and params.search_backend_solr and params.has_infra_solr:
       solr_cloud_util.setup_solr_client(params.config)
       check_znode()
       jaasFile=params.atlas_jaas_file if params.security_enabled else None
@@ -126,12 +126,11 @@
 
   solr_cloud_util.upload_configuration_to_zk(
       zookeeper_quorum=params.zookeeper_quorum,
-      solr_znode=params.logsearch_solr_znode,
+      solr_znode=params.infra_solr_znode,
       config_set_dir=format("{conf_dir}/solr"),
       config_set=config_set,
       tmp_dir=params.tmp_dir,
       java64_home=params.java64_home,
-      user=params.metadata_user,
       solrconfig_content=InlineTemplate(params.metadata_solrconfig_content),
       jaas_file=jaasFile,
       retry=30, interval=5)
@@ -141,20 +140,18 @@
 
   solr_cloud_util.create_collection(
       zookeeper_quorum=params.zookeeper_quorum,
-      solr_znode=params.logsearch_solr_znode,
+      solr_znode=params.infra_solr_znode,
       collection = collection,
       config_set=config_set,
       java64_home=params.java64_home,
-      user=params.metadata_user,
       jaas_file=jaasFile,
       shards=params.atlas_solr_shards,
-      replication_factor = params.logsearch_solr_replication_factor)
+      replication_factor = params.infra_solr_replication_factor)
 
 @retry(times=10, sleep_time=5, err_class=Fail)
 def check_znode():
   import params
   solr_cloud_util.check_znode(
     zookeeper_quorum=params.zookeeper_quorum,
-    solr_znode=params.logsearch_solr_znode,
-    java64_home=params.java64_home,
-    user=params.metadata_user)
+    solr_znode=params.infra_solr_znode,
+    java64_home=params.java64_home)
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
index 872410a..20c62bd 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
@@ -187,13 +187,12 @@
 atlas_search_backend = default("/configurations/application-properties/atlas.graph.index.search.backend", "")
 search_backend_solr = atlas_search_backend.startswith('solr')
 
-# logsearch solr
-logsearch_solr_znode = default("/configurations/logsearch-solr-env/logsearch_solr_znode", None)
-logsearch_solr_dir = '/usr/lib/ambari-logsearch-solr'
-logsearch_solr_hosts = default("/clusterHostInfo/logsearch_solr_hosts", [])
-logsearch_solr_replication_factor = 2 if len(logsearch_solr_hosts) > 1 else 1
+# infra solr
+infra_solr_znode = default("/configurations/infra-solr-env/infra_solr_znode", None)
+infra_solr_hosts = default("/clusterHostInfo/infra_solr_hosts", [])
+infra_solr_replication_factor = 2 if len(infra_solr_hosts) > 1 else 1
 atlas_solr_shards = default("/configurations/atlas-env/atlas_solr-shards", 1)
-has_logsearch_solr = len(logsearch_solr_hosts) > 0
+has_infra_solr = len(infra_solr_hosts) > 0
 
 # zookeeper
 zookeeper_hosts = config['clusterHostInfo']['zookeeper_hosts']
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/metainfo.xml b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/metainfo.xml
index 602e361..630d403 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/metainfo.xml
@@ -28,6 +28,15 @@
           <name>ATLAS_SERVER</name>
           <cardinality>1+</cardinality>
         </component>
+        <dependencies>
+          <dependency>
+            <name>AMBARI_INFRA/INFRA_SOLR_CLIENT</name>
+            <scope>host</scope>
+            <auto-deploy>
+              <enabled>true</enabled>
+            </auto-deploy>
+          </dependency>
+        </dependencies>
       </components>
 
       <quickLinksConfigurations>
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/alerts.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/alerts.json
index 3d58f65..537d7fa 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/alerts.json
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/alerts.json
@@ -28,160 +28,6 @@
           }
         }
       }
-    ],
-    "LOGSEARCH_SOLR": [
-      {
-        "name": "logsearch_solr",
-        "label": "Log Search Solr Web UI",
-        "description": "This host-level alert is triggered if the Solr Cloud Instance is unreachable.",
-        "interval": 1,
-        "scope": "ANY",
-        "source": {
-          "type": "WEB",
-          "uri": {
-            "http": "{{logsearch-solr-env/logsearch_solr_port}}",
-            "https": "{{logsearch-solr-env/logsearch_solr_port}}",
-            "https_property": "{{logsearch-solr-env/logsearch_solr_ssl_enabled}}",
-            "https_property_value": "true",
-            "connection_timeout": 5.0,
-            "kerberos_keytab": "{{logsearch-solr-env/logsearch_solr_web_kerberos_keytab}}",
-            "kerberos_principal": "{{logsearch-solr-env/logsearch_solr_web_kerberos_principal}}",
-            "default_port": 8886
-          },
-          "reporting": {
-            "ok": {
-              "text": "HTTP {0} response in {2:.3f}s"
-            },
-            "warning": {
-              "text": "HTTP {0} response from {1} in {2:.3f}s ({3})"
-            },
-            "critical": {
-              "text": "Connection failed to {1} ({3})"
-            }
-          }
-        }
-      },
-      {
-        "name": "logsearch_solr_index_size",
-        "label": "Log Search Solr Index Size",
-        "description": "This host-level alert is triggered if the Solr index size is over a limit.",
-        "interval": 1,
-        "scope": "ANY",
-        "source": {
-          "type": "AMS",
-          "uri": {
-            "http": "{{ams-site/timeline.metrics.service.webapp.address}}",
-            "https": "{{ams-site/timeline.metrics.service.webapp.address}}",
-            "https_property": "{{ams-site/timeline.metrics.service.http.policy}}",
-            "https_property_value": "HTTPS_ONLY",
-            "connection_timeout": 5.0
-          },
-          "reporting": {
-            "ok": {
-              "text": "Solr index size is {0:.1f}GB"
-            },
-            "warning": {
-              "text": "Solr index size is {0:.1f}GB",
-              "value": 50
-            },
-            "critical": {
-              "text": "Solr index size is {0:.1f}GB",
-              "value": 100
-            },
-            "units" : "GB"
-          },
-          "ams": {
-            "app_id": "logsearch-solr",
-            "interval": 1,
-            "metric_list": [
-              "logsearch.solr.index.size"
-            ],
-            "value": "{0}",
-            "compute": "mean"
-          }
-        }
-      },
-      {
-        "name": "logsearch_solr_cpu_usage",
-        "label": "Log Search Solr CPU Utilization",
-        "description": "This host-level alert is triggered if CPU utilization of the Log Search Solr exceeds certain warning and critical thresholds. It checks the Log Search Solr JMX Servlet for the SystemCPULoad property. The threshold values are in percent.",
-        "interval": 5,
-        "scope": "ANY",
-        "source": {
-          "type": "AMS",
-          "uri": {
-            "http": "{{ams-site/timeline.metrics.service.webapp.address}}",
-            "https": "{{ams-site/timeline.metrics.service.webapp.address}}",
-            "https_property": "{{ams-site/timeline.metrics.service.http.policy}}",
-            "https_property_value": "HTTPS_ONLY",
-            "connection_timeout": 5.0
-          },
-          "reporting": {
-            "ok": {
-              "text": "CPU load {0:.1%}"
-            },
-            "warning": {
-              "text": "CPU load {0:.1%}",
-              "value": 200
-            },
-            "critical": {
-              "text": "CPU load {0:.1%}",
-              "value": 250
-            },
-            "units" : "%"
-          },
-          "ams": {
-            "app_id": "logsearch-solr",
-            "interval": 1,
-            "metric_list": [
-              "logsearch.solr.cpu.usage"
-            ],
-            "value": "{0}",
-            "compute": "mean"
-          }
-        }
-      },
-      {
-        "name": "logsearch_solr_memory",
-        "label": "Log Search Solr Memory Utilization",
-        "description": "This host-level alert is triggered if CPU utilization of the Log Search Solr exceeds certain warning and critical thresholds. It checks the Log Search Solr JMX Servlet for the SystemCPULoad property. The threshold values are in percent.",
-        "interval": 5,
-        "scope": "ANY",
-        "source": {
-          "type": "AMS",
-          "uri": {
-            "http": "{{ams-site/timeline.metrics.service.webapp.address}}",
-            "https": "{{ams-site/timeline.metrics.service.webapp.address}}",
-            "https_property": "{{ams-site/timeline.metrics.service.http.policy}}",
-            "https_property_value": "HTTPS_ONLY",
-            "connection_timeout": 5.0
-          },
-          "reporting": {
-            "ok": {
-              "text": "Memory usage is {0:.1%}"
-            },
-            "warning": {
-              "text": "Memory usage is {0:.1%}",
-              "value": 200
-            },
-            "critical": {
-              "text": "Memory usage is {0:.1%}",
-              "value": 250
-            },
-            "units" : "%"
-          },
-          "ams": {
-            "app_id": "logsearch-solr",
-            "interval": 1,
-            "metric_list": [
-              "jvm.JvmMetrics.MemHeapUsedM",
-              "jvm.JvmMetrics.MemHeapMaxM"
-            ],
-            "value": "{0} / {1}",
-            "compute": "mean"
-          }
-        }
-      }
     ]
   }
 }
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml
index ab0e3b9..c5b9b4e 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml
@@ -82,17 +82,6 @@
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
-    <name>logsearch_solr_audit_logs_use_ranger</name>
-    <value>false</value>
-    <display-name>Ranger Audit Enabled</display-name>
-    <description>Use Ranger Audit collection. This is supported only if Ranger Solr is installed in SolrCloud mode
-    </description>
-    <value-attributes>
-      <type>boolean</type>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
     <name>logsearch_debug_enabled</name>
     <value>false</value>
     <display-name>Log Search Debug Enabled</display-name>
@@ -111,7 +100,7 @@
   </property>
   <property>
     <name>logsearch_solr_audit_logs_zk_node</name>
-    <value>{logsearch_solr_znode}</value>
+    <value>{infra_solr_znode}</value>
     <display-name>Solr Audit Logs Znode</display-name>
     <description>Only needed if using custom solr cloud. E.g. /audit_logs</description>
     <on-ambari-upgrade add="true"/>
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-env.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-env.xml
deleted file mode 100644
index e458931..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-solr-env.xml
+++ /dev/null
@@ -1,245 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<!-- This is a special config file for properties used to monitor status of the service -->
-<configuration supports_adding_forbidden="true">
-  <property>
-    <name>logsearch_solr_port</name>
-    <value>8886</value>
-    <description>Solr port</description>
-    <display-name>Log Search Solr port</display-name>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_jmx_port</name>
-    <value>18886</value>
-    <description>Solr JMX port</description>
-    <display-name>Log Search Solr JMX port</display-name>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_pid_dir</name>
-    <value>/var/run/ambari-logsearch-solr</value>
-    <description>Solr Process ID Directory</description>
-    <display-name>Log Search Solr pid dir</display-name>
-    <value-attributes>
-      <type>directory</type>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_log_dir</name>
-    <value>/var/log/ambari-logsearch-solr</value>
-    <description>Directory for Solr logs</description>
-    <display-name>Log Search Solr log dir</display-name>
-    <value-attributes>
-      <type>directory</type>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_client_log_dir</name>
-    <value>/var/log/ambari-logsearch-solr-client</value>
-    <description>Directory for Solr client logs</description>
-    <display-name>Log Search Solr Client log dir</display-name>
-    <value-attributes>
-      <type>directory</type>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_user</name>
-    <value>logsearch-solr</value>
-    <property-type>USER</property-type>
-    <description>Solr user</description>
-    <display-name>Log Search Solr User</display-name>
-    <value-attributes>
-      <type>user</type>
-      <overridable>false</overridable>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_datadir</name>
-    <value>/opt/logsearch_solr/data</value>
-    <display-name>Log Search Solr data dir</display-name>
-    <description>Directory for storting Solr index. Make sure you have enough disk space</description>
-    <value-attributes>
-      <type>directory</type>
-      <overridable>false</overridable>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_ssl_enabled</name>
-    <value>false</value>
-    <display-name>Enable SSL to Log Search Solr</display-name>
-    <description>Enable ssl to Solr</description>
-    <value-attributes>
-      <type>boolean</type>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_truststore_location</name>
-    <value>/etc/security/serverKeys/logsearch.trustStore.jks</value>
-    <display-name>Log Search Solr trust store location</display-name>
-    <description>Location of the trust store file.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_truststore_type</name>
-    <value>jks</value>
-    <display-name>Log Search Solr trust store type</display-name>
-    <description>Type of the trust store file.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_truststore_password</name>
-    <value>bigdata</value>
-    <property-type>PASSWORD</property-type>
-    <display-name>Log Search Solr trust store password</display-name>
-    <description>Password to open the trust store file.</description>
-    <value-attributes>
-      <type>password</type>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_keystore_location</name>
-    <value>/etc/security/serverKeys/logsearch.keyStore.jks</value>
-    <display-name>Log Search Solr key store location</display-name>
-    <description>Location of the key store file.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_keystore_type</name>
-    <value>jks</value>
-    <display-name>Log Search Solr key store type</display-name>
-    <description>Type of the key store file.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_keystore_password</name>
-    <value>bigdata</value>
-    <display-name>Log Search Solr key store password</display-name>
-    <property-type>PASSWORD</property-type>
-    <description>Password to open the key store file.</description>
-    <value-attributes>
-      <type>password</type>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_znode</name>
-    <value>/ambari-solr</value>
-    <description>Zookeeper znode, e.g: /logsearch</description>
-    <display-name>Log Search Solr ZNode</display-name>
-    <value-attributes>
-      <overridable>false</overridable>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_minmem</name>
-    <value>1024</value>
-    <display-name>Log Search Solr Minimum Heap Size</display-name>
-    <description>Solr minimum heap size e.g.512m</description>
-    <value-attributes>
-      <type>int</type>
-      <minimum>512</minimum>
-      <maximum>5120</maximum>
-      <unit>MB</unit>
-      <increment-step>256</increment-step>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>logsearch_solr_maxmem</name>
-    <value>2048</value>
-    <display-name>Log Search Solr Maximum Heap Size</display-name>
-    <description>Solr maximum heap size e.g. 512m</description>
-    <value-attributes>
-      <type>int</type>
-      <minimum>512</minimum>
-      <maximum>5120</maximum>
-      <unit>MB</unit>
-      <increment-step>256</increment-step>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-
-  <property>
-    <name>logsearch_solr_kerberos_keytab</name>
-    <value>/etc/security/keytabs/logsearch_solr.service.keytab</value>
-    <display-name>Log Search Solr keytab</display-name>
-    <description>The path to the Kerberos Keytab file containing service principal of the Log Search Solr.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-
-  <property>
-    <name>logsearch_solr_kerberos_principal</name>
-    <value>logsearch-solr</value>
-    <display-name>Log Search Solr principal</display-name>
-    <description>The service principal for Log Search Solr.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-
-  <property>
-    <name>logsearch_solr_web_kerberos_keytab</name>
-    <value>/etc/security/keytabs/spnego.service.keytab</value>
-    <display-name>Log Search Solr Http keytab</display-name>
-    <description>The path to the Kerberos Keytab file containing service principal of the Log Search Solr.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-
-  <property>
-    <name>logsearch_solr_web_kerberos_principal</name>
-    <value>HTTP/_HOST@EXAMPLE.COM</value>
-    <display-name>Log Search Solr Http principal</display-name>
-    <description>The service principal for the Log Search Solr.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-
-  <property>
-    <name>logsearch_solr_kerberos_name_rules</name>
-    <value>DEFAULT</value>
-    <description>Kerberos name rules for Spnego</description>
-    <value-attributes>
-      <overridable>false</overridable>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-
-  <!-- logsearch-solr-env.sh -->
-  <property>
-    <name>content</name>
-    <display-name>logsearch-solr-env template</display-name>
-    <description>This is the jinja template for logsearch-solr-env.sh file</description>
-    <value/>
-    <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
-    <value-attributes>
-      <property-file-name>logsearch-solr-env.sh.j2</property-file-name>
-      <property-file-type>text</property-file-type>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
index ab4793b..49d1b10 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
@@ -5,44 +5,10 @@
       "identities": [
         {
           "name": "/smokeuser"
-        },
-        {
-          "name": "/spnego",
-          "principal": {
-            "configuration": "logsearch-solr-env/logsearch_solr_web_kerberos_principal"
-          },
-          "keytab": {
-            "configuration": "logsearch-solr-env/logsearch_solr_web_kerberos_keytab"
-          }
         }
       ],
       "components": [
         {
-          "name": "LOGSEARCH_SOLR",
-          "identities": [
-            {
-              "name": "logsearch-solr",
-              "principal": {
-                "value": "logsearch-solr/_HOST@${realm}",
-                "type": "service",
-                "configuration": "logsearch-solr-env/logsearch_solr_kerberos_principal"
-              },
-              "keytab": {
-                "file": "${keytab_dir}/logsearch-solr.service.keytab",
-                "owner": {
-                  "name": "${logsearch-solr-env/logsearch_solr_user}",
-                  "access": "r"
-                },
-                "group": {
-                  "name": "${cluster-env/user_group}",
-                  "access": ""
-                },
-                "configuration": "logsearch-solr-env/logsearch_solr_kerberos_keytab"
-              }
-            }
-          ]
-        },
-        {
           "name": "LOGSEARCH_SERVER",
           "identities": [
           {
@@ -91,9 +57,6 @@
               }
             }
           ]
-        },
-        {
-          "name": "LOGSEARCH_SOLR_CLIENT"
         }
       ]
     }
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
index ebeffc2..e0280b4 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
@@ -21,7 +21,7 @@
     <service>
       <name>LOGSEARCH</name>
       <displayName>Log Search</displayName>
-      <comment>Log aggregation, analysis, and visualization for Ambari managed services. This service is Tech Preview.</comment>
+      <comment>Log aggregation, analysis, and visualization for Ambari managed services. This service is &lt;b&gt;Technical Preview&lt;/b&gt;.</comment>
       <version>0.5.0</version>
 
       <components>
@@ -48,7 +48,7 @@
           </logs>
           <dependencies>
             <dependency>
-              <name>LOGSEARCH/LOGSEARCH_SOLR_CLIENT</name>
+              <name>AMBARI_INFRA/INFRA_SOLR_CLIENT</name>
               <scope>host</scope>
               <auto-deploy>
                 <enabled>true</enabled>
@@ -65,55 +65,11 @@
 
           </dependencies>
           <configuration-dependencies>
+            <config-type>infra-solr-env</config-type>
             <config-type>logsearch-properties</config-type>
             <config-type>logsearch-env</config-type>
             <config-type>logsearch-log4j</config-type>
             <config-type>logsearch-admin-json</config-type>
-            <config-type>logsearch-solr-env</config-type>
-            <config-type>logsearch-service_logs-solrconfig.xml</config-type>
-            <config-type>logsearch-audit_logs-solrconfig.xml</config-type>
-          </configuration-dependencies>
-        </component>
-
-        <component>
-          <name>LOGSEARCH_SOLR</name>
-          <timelineAppid>logsearch-solr</timelineAppid>
-          <displayName>Log Search Solr Instance</displayName>
-          <category>MASTER</category>
-          <cardinality>1+</cardinality>
-          <versionAdvertised>false</versionAdvertised>
-          <commandScript>
-            <script>scripts/logsearch_solr.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <logs>
-            <log>
-              <logId>logsearch_solr</logId>
-              <primary>true</primary>
-            </log>
-          </logs>
-          <dependencies>
-            <dependency>
-              <name>LOGSEARCH/LOGSEARCH_SOLR_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-
-          </dependencies>
-          <configuration-dependencies>
-            <config-type>logsearch-solr-env</config-type>
-            <config-type>logsearch-solr-xml</config-type>
-            <config-type>logsearch-solr-log4j</config-type>
             <config-type>logsearch-service_logs-solrconfig.xml</config-type>
             <config-type>logsearch-audit_logs-solrconfig.xml</config-type>
           </configuration-dependencies>
@@ -150,32 +106,13 @@
           </dependencies>
 
           <configuration-dependencies>
+            <config-type>infra-solr-env</config-type>
             <config-type>logfeeder-properties</config-type>
             <config-type>logfeeder-env</config-type>
             <config-type>logfeeder-log4j</config-type>
-            <config-type>logsearch-solr-env</config-type>
           </configuration-dependencies>
         </component>
 
-        <component>
-          <name>LOGSEARCH_SOLR_CLIENT</name>
-          <displayName>Log Search Solr Client</displayName>
-          <category>CLIENT</category>
-          <cardinality>0+</cardinality>
-          <versionAdvertised>false</versionAdvertised>
-          <commandScript>
-            <script>scripts/logsearch_solr_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>env</type>
-              <fileName>log4j.properties</fileName>
-              <dictionaryName>logsearch-solr-client-log4j</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-
       </components>
 
       <osSpecifics>
@@ -183,11 +120,6 @@
           <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>
           <packages>
             <package>
-              <name>ambari-logsearch-solr-client</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_logsearch_solr_client</condition>
-            </package>
-            <package>
               <name>ambari-logsearch-logfeeder</name>
               <skipUpgrade>true</skipUpgrade>
             </package>
@@ -196,22 +128,12 @@
               <skipUpgrade>true</skipUpgrade>
               <condition>should_install_logsearch_portal</condition>
             </package>
-            <package>
-              <name>ambari-logsearch-solr</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_logsearch_solr</condition>
-            </package>
           </packages>
         </osSpecific>
         <osSpecific>
           <osFamily>debian7,ubuntu12,ubuntu14,ubuntu16</osFamily>
           <packages>
             <package>
-              <name>ambari-logsearch-solr-client</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_logsearch_solr_client</condition>
-            </package>
-            <package>
               <name>ambari-logsearch-logfeeder</name>
               <skipUpgrade>true</skipUpgrade>
             </package>
@@ -220,11 +142,6 @@
               <skipUpgrade>true</skipUpgrade>
               <condition>should_install_logsearch_portal</condition>
             </package>
-            <package>
-              <name>ambari-logsearch-solr</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_logsearch_solr</condition>
-            </package>
           </packages>
         </osSpecific>
       </osSpecifics>
@@ -236,7 +153,7 @@
       </commandScript>
 
       <requiredServices>
-        <service>ZOOKEEPER</service>
+        <service>AMBARI_INFRA</service>
       </requiredServices>
 
       <quickLinksConfigurations>
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metrics.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metrics.json
index 1e1a388..7ffb77e 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metrics.json
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metrics.json
@@ -49,56 +49,5 @@
         }
       }
     ]
-  },
-    "LOGSEARCH_SOLR": {
-    "Component": [
-      {
-        "type": "ganglia",
-        "metrics": {
-          "default": {
-            "metrics/logsearch/solr/cpu/usage": {
-              "metric": "logsearch.solr.cpu.usage",
-              "pointInTime": true,
-              "temporal": true
-            },
-            "metrics/jvm/memHeapUsedM": {
-              "metric": "jvm.JvmMetrics.MemHeapUsedM",
-              "pointInTime": true,
-              "temporal": true
-            },
-            "metrics/jvm/memHeapCommittedM": {
-              "metric": "jvm.JvmMetrics.MemHeapCommittedM",
-              "pointInTime": true,
-              "temporal": true
-            },
-            "metrics/jvm/memHeapMaxM": {
-              "metric": "jvm.JvmMetrics.MemHeapMaxM",
-              "pointInTime": true,
-              "temporal": true
-            },
-            "metrics/jvm/memNonHeapUsedM": {
-              "metric": "jvm.JvmMetrics.MemNonHeapUsedM",
-              "pointInTime": true,
-              "temporal": true
-            },
-            "metrics/jvm/memNonHeapCommittedM": {
-              "metric": "jvm.JvmMetrics.MemNonHeapCommittedM",
-              "pointInTime": true,
-              "temporal": true
-            },
-            "metrics/jvm/memNonHeapMaxM": {
-              "metric": "jvm.JvmMetrics.MemNonHeapMaxM",
-              "pointInTime": true,
-              "temporal": true
-            },
-            "metrics/logsearch/solr/index/size": {
-              "metric": "logsearch.solr.index.size",
-              "pointInTime": true,
-              "temporal": true
-            }
-          }
-        }
-      }
-    ]
   }
 }
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch_solr.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch_solr.py
deleted file mode 100644
index 4f89bd3..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch_solr.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management.core.resources.system import Execute, File
-from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.check_process_status import check_process_status
-from resource_management.libraries.script.script import Script
-from setup_logsearch_solr import setup_logsearch_solr
-from logsearch_common import kill_process
-from resource_management.core.logger import Logger
-import sys
-
-class LogsearchSolr(Script):
-  def install(self, env):
-    import params
-    env.set_params(params)
-    self.install_packages(env)
-
-  def configure(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    setup_logsearch_solr(name = 'server')
-
-  def start(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-    self.configure(env)
-
-    Execute(
-      format('{solr_bindir}/solr start -cloud -noprompt -s {logsearch_solr_datadir} >> {logsearch_solr_log} 2>&1'),
-      environment={'SOLR_INCLUDE': format('{logsearch_solr_conf}/logsearch-solr-env.sh')},
-      user=params.logsearch_solr_user
-    )
-
-  def stop(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    try:
-      Execute(format('{solr_bindir}/solr stop -all >> {logsearch_solr_log}'),
-              environment={'SOLR_INCLUDE': format('{logsearch_solr_conf}/logsearch-solr-env.sh')},
-              user=params.logsearch_solr_user,
-              only_if=format("test -f {logsearch_solr_pidfile}")
-              )
-      
-      File(params.logsearch_solr_pidfile,
-           action="delete"
-           )
-    except:
-      Logger.warning("Could not stop solr:" + str(sys.exc_info()[1]) + "\n Trying to kill it")
-      kill_process(params.logsearch_solr_pidfile, params.logsearch_solr_user, params.logsearch_solr_log_dir);
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-
-    check_process_status(status_params.logsearch_solr_pidfile)
-
-
-if __name__ == "__main__":
-  LogsearchSolr().execute()
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
index 0d6f4ec..cb7b79f 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
@@ -23,7 +23,6 @@
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.is_empty import is_empty
 from resource_management.libraries.script.script import Script
-import os
 import status_params
 
 
@@ -42,16 +41,11 @@
 sudo = AMBARI_SUDO_BINARY
 security_enabled = status_params.security_enabled
 
-logsearch_solr_conf = "/etc/ambari-logsearch-solr/conf"
 logsearch_server_conf = "/etc/ambari-logsearch-portal/conf"
 logsearch_logfeeder_conf = "/etc/ambari-logsearch-logfeeder/conf"
 
 logsearch_config_set_dir = format("{logsearch_server_conf}/solr_configsets")
 
-logsearch_solr_port = status_params.logsearch_solr_port
-logsearch_solr_piddir = status_params.logsearch_solr_piddir
-logsearch_solr_pidfile = status_params.logsearch_solr_pidfile
-
 # logsearch pid file
 logsearch_pid_dir = status_params.logsearch_pid_dir
 logsearch_pid_file = status_params.logsearch_pid_file
@@ -81,38 +75,15 @@
 else:
   metrics_collector_hosts = ''
 
-#####################################
-# Solr configs
-#####################################
-
-# Only supporting SolrCloud mode - so hardcode those options
-solr_cloudmode = 'true'
-solr_dir = '/usr/lib/ambari-logsearch-solr'
-solr_client_dir = '/usr/lib/ambari-logsearch-solr-client'
-solr_bindir = solr_dir + '/bin'
-cloud_scripts = solr_dir + '/server/scripts/cloud-scripts'
-
-logsearch_solr_znode = config['configurations']['logsearch-solr-env']['logsearch_solr_znode']
-logsearch_solr_min_mem = format(config['configurations']['logsearch-solr-env']['logsearch_solr_minmem'])
-logsearch_solr_max_mem = format(config['configurations']['logsearch-solr-env']['logsearch_solr_maxmem'])
-logsearch_solr_instance_count = len(config['clusterHostInfo']['logsearch_solr_hosts'])
-logsearch_solr_datadir = format(config['configurations']['logsearch-solr-env']['logsearch_solr_datadir'])
-logsearch_solr_data_resources_dir = os.path.join(logsearch_solr_datadir, 'resources')
-logsearch_service_logs_max_retention = config['configurations']['logsearch-service_logs-solrconfig']['logsearch_service_logs_max_retention']
-logsearch_service_logs_merge_factor = config['configurations']['logsearch-service_logs-solrconfig']['logsearch_service_logs_merge_factor']
-logsearch_audit_logs_max_retention = config['configurations']['logsearch-audit_logs-solrconfig']['logsearch_audit_logs_max_retention']
-logsearch_audit_logs_merge_factor = config['configurations']['logsearch-audit_logs-solrconfig']['logsearch_audit_logs_merge_factor']
-
 logsearch_solr_metrics_collector_hosts = format(config['configurations']['logsearch-properties']['logsearch.solr.metrics.collector.hosts'])
-logsearch_solr_jmx_port = config['configurations']['logsearch-solr-env']['logsearch_solr_jmx_port']
 
-logsearch_service_logs_fields = config['configurations']['logsearch-properties']['logsearch.service.logs.fields']
-
-logsearch_audit_logs_split_interval_mins = config['configurations']['logsearch-properties']['logsearch.audit.logs.split.interval.mins']
-logsearch_service_logs_split_interval_mins = config['configurations']['logsearch-properties']['logsearch.service.logs.split.interval.mins']
+# Infra Solr configs
+infra_solr_znode = default('/configurations/infra-solr-env/infra_solr_znode', '/infra-solr')
+infra_solr_instance_count = len(config['clusterHostInfo']['infra_solr_hosts'])
+infra_solr_ssl_enabled = default('configurations/infra-solr-env/infra_solr_ssl_enabled', False)
+infra_solr_jmx_port = config['configurations']['infra-solr-env']['infra_solr_jmx_port']
 
 zookeeper_port = default('/configurations/zoo.cfg/clientPort', None)
-# get comma separated list of zookeeper hosts from clusterHostInfo
 index = 0
 zookeeper_quorum = ""
 for host in config['clusterHostInfo']['zookeeper_hosts']:
@@ -121,43 +92,16 @@
   if index < len(config['clusterHostInfo']['zookeeper_hosts']):
     zookeeper_quorum += ","
 
-if 'zoo.cfg' in config['configurations']:
-  zoo_cfg_properties_map = config['configurations']['zoo.cfg']
-else:
-  zoo_cfg_properties_map = {}
-
-
 
 if security_enabled:
   kinit_path_local = status_params.kinit_path_local
   _hostname_lowercase = config['hostname'].lower()
   logsearch_jaas_file = logsearch_server_conf + '/logsearch_jaas.conf'
-  logsearch_solr_jaas_file = logsearch_solr_conf + '/logsearch_solr_jaas.conf'
   logfeeder_jaas_file = logsearch_logfeeder_conf + '/logfeeder_jaas.conf'
-  logsearch_solr_kerberos_keytab = config['configurations']['logsearch-solr-env']['logsearch_solr_kerberos_keytab']
-  logsearch_solr_kerberos_principal = config['configurations']['logsearch-solr-env']['logsearch_solr_kerberos_principal'].replace('_HOST',_hostname_lowercase)
-  logsearch_solr_web_kerberos_keytab = config['configurations']['logsearch-solr-env']['logsearch_solr_web_kerberos_keytab']
-  logsearch_solr_web_kerberos_principal = config['configurations']['logsearch-solr-env']['logsearch_solr_web_kerberos_principal'].replace('_HOST',_hostname_lowercase)
   logsearch_kerberos_keytab = config['configurations']['logsearch-env']['logsearch_kerberos_keytab']
   logsearch_kerberos_principal = config['configurations']['logsearch-env']['logsearch_kerberos_principal'].replace('_HOST',_hostname_lowercase)
   logfeeder_kerberos_keytab = config['configurations']['logfeeder-env']['logfeeder_kerberos_keytab']
   logfeeder_kerberos_principal = config['configurations']['logfeeder-env']['logfeeder_kerberos_principal'].replace('_HOST',_hostname_lowercase)
-  logsearch_solr_kerberos_name_rules = config['configurations']['logsearch-solr-env']['logsearch_solr_kerberos_name_rules']
-
-
-logsearch_solr_user = config['configurations']['logsearch-solr-env']['logsearch_solr_user']
-logsearch_solr_log_dir = config['configurations']['logsearch-solr-env']['logsearch_solr_log_dir']
-logsearch_solr_client_log_dir = config['configurations']['logsearch-solr-env']['logsearch_solr_client_log_dir']
-logsearch_solr_client_log = format("{logsearch_solr_client_log_dir}/solr-client.log")
-logsearch_solr_log = format("{logsearch_solr_log_dir}/solr-install.log")
-
-solr_env_content = config['configurations']['logsearch-solr-env']['content']
-
-solr_xml_content = config['configurations']['logsearch-solr-xml']['content']
-
-solr_log4j_content = config['configurations']['logsearch-solr-log4j']['content']
-
-solr_client_log4j_content = config['configurations']['logsearch-solr-client-log4j']['content']
 
 #####################################
 # Logsearch configs
@@ -170,40 +114,34 @@
 if logsearch_collection_service_logs_numshards_config > 0:
   logsearch_collection_service_logs_numshards = str(logsearch_collection_service_logs_numshards_config)
 else:
-  logsearch_collection_service_logs_numshards = format(str(logsearch_solr_instance_count))
+  logsearch_collection_service_logs_numshards = format(str(infra_solr_instance_count))
 
 if logsearch_collection_audit_logs_numshards_config > 0:
   logsearch_collection_audit_logs_numshards = str(logsearch_collection_audit_logs_numshards_config)
 else:
-  logsearch_collection_audit_logs_numshards = format(str(logsearch_solr_instance_count))
+  logsearch_collection_audit_logs_numshards = format(str(infra_solr_instance_count))
 
 logsearch_collection_service_logs_replication_factor = str(config['configurations']['logsearch-properties']['logsearch.collection.service.logs.replication.factor'])
 logsearch_collection_audit_logs_replication_factor = str(config['configurations']['logsearch-properties']['logsearch.collection.audit.logs.replication.factor'])
 
 logsearch_solr_collection_service_logs = default('/configurations/logsearch-properties/logsearch.solr.collection.service.logs', 'hadoop_logs')
 logsearch_solr_collection_audit_logs = default('/configurations/logsearch-properties/logsearch.solr.collection.audit.logs','audit_logs')
+
+logsearch_service_logs_max_retention = config['configurations']['logsearch-service_logs-solrconfig']['logsearch_service_logs_max_retention']
+logsearch_service_logs_merge_factor = config['configurations']['logsearch-service_logs-solrconfig']['logsearch_service_logs_merge_factor']
+logsearch_service_logs_fields = config['configurations']['logsearch-properties']['logsearch.service.logs.fields']
+logsearch_service_logs_split_interval_mins = config['configurations']['logsearch-properties']['logsearch.service.logs.split.interval.mins']
+
+logsearch_audit_logs_max_retention = config['configurations']['logsearch-audit_logs-solrconfig']['logsearch_audit_logs_max_retention']
+logsearch_audit_logs_merge_factor = config['configurations']['logsearch-audit_logs-solrconfig']['logsearch_audit_logs_merge_factor']
+logsearch_audit_logs_split_interval_mins = config['configurations']['logsearch-properties']['logsearch.audit.logs.split.interval.mins']
+
 logsearch_logfeeder_include_default_level = default('/configurations/logsearch-properties/logsearch.logfeeder.include.default.level', 'fatal,error,warn')
 
-logsearch_solr_audit_logs_use_ranger = default('/configurations/logsearch-env/logsearch_solr_audit_logs_use_ranger', False)
-logsearch_solr_audit_logs_url = ''
-
-if logsearch_solr_audit_logs_use_ranger:
-  # In Ranger, this contain the /zkNode also
-  ranger_audit_solr_zookeepers = default('/configurations/ranger-admin-site/ranger.audit.solr.zookeepers', None)
-  # TODO: ranger property already has zk node appended. We need to remove it.
-  # For now, let's assume it is going to be URL
-  logsearch_solr_audit_logs_url = default('/configurations/ranger-admin-site/ranger.audit.solr.urls', solr_audit_logs_url)
-else:
-  logsearch_solr_audit_logs_zk_node = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_node', None)
-  logsearch_solr_audit_logs_zk_quorum = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_quorum', None)
-
-  if not (logsearch_solr_audit_logs_zk_quorum):
-    logsearch_solr_audit_logs_zk_quorum = zookeeper_quorum
-  if not (logsearch_solr_audit_logs_zk_node):
-    logsearch_solr_audit_logs_zk_node = logsearch_solr_znode
-
-  logsearch_solr_audit_logs_zk_node = format(logsearch_solr_audit_logs_zk_node)
-  logsearch_solr_audit_logs_zk_quorum = format(logsearch_solr_audit_logs_zk_quorum)
+logsearch_solr_audit_logs_zk_node = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_node', infra_solr_znode)
+logsearch_solr_audit_logs_zk_quorum = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_quorum', zookeeper_quorum)
+logsearch_solr_audit_logs_zk_node = format(logsearch_solr_audit_logs_zk_node)
+logsearch_solr_audit_logs_zk_quorum = format(logsearch_solr_audit_logs_zk_quorum)
 
 # create custom properties - remove defaults
 logsearch_custom_properties = dict(config['configurations']['logsearch-properties'])
@@ -257,6 +195,7 @@
 hdfs_log_dir_prefix = default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')
 hive_log_dir = default('/configurations/hive-env/hive_log_dir', '/var/log/hive')
 hcat_log_dir = default('configurations/hive-env/hcat_log_dir', '/var/log/webhcat')
+infra_solr_log_dir = default('configurations/infra-solr-env/infra_solr_log_dir', '/var/log/ambari-infra-solr')
 kafka_log_dir = default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')
 nifi_log_dir = default('/configurations/nifi-env/nifi_node_log_dir', '/var/log/nifi')
 oozie_log_dir = default('/configurations/oozie-env/oozie_log_dir', '/var/log/oozie')
@@ -316,13 +255,6 @@
 logfeeder_env_content = config['configurations']['logfeeder-env']['content']
 logfeeder_log4j_content = config['configurations']['logfeeder-log4j']['content']
 
-logsearch_solr_ssl_enabled = default('configurations/logsearch-solr-env/logsearch_solr_ssl_enabled', False)
-logsearch_solr_keystore_location = config['configurations']['logsearch-solr-env']['logsearch_solr_keystore_location']
-logsearch_solr_keystore_password = config['configurations']['logsearch-solr-env']['logsearch_solr_keystore_password']
-logsearch_solr_keystore_type = config['configurations']['logsearch-solr-env']['logsearch_solr_keystore_type']
-logsearch_solr_truststore_location = config['configurations']['logsearch-solr-env']['logsearch_solr_truststore_location']
-logsearch_solr_truststore_password = config['configurations']['logsearch-solr-env']['logsearch_solr_truststore_password']
-logsearch_solr_truststore_type = config['configurations']['logsearch-solr-env']['logsearch_solr_truststore_type']
 logsearch_keystore_location = config['configurations']['logsearch-env']['logsearch_keystore_location']
 logsearch_keystore_password = config['configurations']['logsearch-env']['logsearch_keystore_password']
 logsearch_keystore_type = config['configurations']['logsearch-env']['logsearch_keystore_type']
@@ -342,7 +274,7 @@
 logfeeder_log_filter_enable = str(default('/configurations/logfeeder-properties/logfeeder.log.filter.enable', True)).lower()
 logfeeder_solr_config_interval = default('/configurations/logfeeder-properties/logfeeder.solr.config.interval', 5)
 
-logfeeder_supported_services = ['accumulo', 'ambari', 'ams', 'atlas', 'falcon', 'flume', 'hbase', 'hdfs', 'hive', 'hst', 'kafka',
+logfeeder_supported_services = ['accumulo', 'ambari', 'ams', 'atlas', 'falcon', 'flume', 'hbase', 'hdfs', 'hive', 'hst', 'infra', 'kafka',
                                 'knox', 'logsearch', 'nifi', 'oozie', 'ranger', 'spark', 'spark2', 'storm', 'yarn', 'zeppelin', 'zookeeper']
 
 logfeeder_config_file_names = ['global.config.json', 'output.config.json'] + ['input.config-%s.json' % (tag) for tag in
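
The audit-log ZooKeeper settings above now rely on the second argument of default() instead of the removed read-then-override blocks: the quorum falls back to the ZooKeeper host list and the node to the Infra Solr znode. A minimal sketch of that fallback behaviour, assuming a simplified stand-in for the real helper in resource_management (the config dict, quorum string and paths below are invented example values):

# Illustrative only: a simplified stand-in for
# resource_management.libraries.functions.default.default(), showing how the
# rewritten params.py collapses "read, then fall back if empty" into one call.
_config = {
    'configurations': {
        'logsearch-env': {}   # property not set, so the fallback is used
    }
}

def default(path, fallback):
    """Walk the config dict by '/'-separated path; return fallback if any key is missing."""
    node = _config
    for part in filter(None, path.split('/')):
        if not isinstance(node, dict) or part not in node:
            return fallback
        node = node[part]
    return node

zookeeper_quorum = 'zk1:2181,zk2:2181,zk3:2181'   # invented example quorum
infra_solr_znode = '/infra-solr'

# zk_quorum falls back to the ZooKeeper host list, zk_node to the Solr znode,
# matching the semantics of the removed if-not blocks.
audit_zk_quorum = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_quorum', zookeeper_quorum)
audit_zk_node = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_node', infra_solr_znode)
print(audit_zk_quorum + audit_zk_node)   # -> zk1:2181,zk2:2181,zk3:2181/infra-solr
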
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
index aced026..aa89609 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
@@ -121,12 +121,11 @@
   jaas_file = params.logsearch_jaas_file if params.security_enabled else None
   solr_cloud_util.upload_configuration_to_zk(
     zookeeper_quorum=params.zookeeper_quorum,
-    solr_znode=params.logsearch_solr_znode,
+    solr_znode=params.infra_solr_znode,
     config_set_dir=format("{logsearch_server_conf}/solr_configsets/{config_set}/conf"),
     config_set=config_set,
     tmp_dir=params.tmp_dir,
     java64_home=params.java64_home,
-    user=params.logsearch_solr_user,
     solrconfig_content= solrconfig_content,
     jaas_file=jaas_file,
     retry=30, interval=5)
@@ -136,6 +135,5 @@
   import params
   solr_cloud_util.check_znode(
     zookeeper_quorum=params.zookeeper_quorum,
-    solr_znode=params.logsearch_solr_znode,
-    java64_home=params.java64_home,
-    user=params.logsearch_solr_user)
+    solr_znode=params.infra_solr_znode,
+    java64_home=params.java64_home)
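
With the user argument gone, the only conditional input left in these calls is the JAAS file. A small sketch of that selection pattern, where every value is an invented placeholder (the real ones come from params.py) and the actual solr_cloud_util call is left commented out:

# Minimal sketch of the jaas_file selection used above; values are placeholders.
security_enabled = False                                                            # assumed for the example
logsearch_jaas_file = '/usr/lib/ambari-logsearch-portal/conf/logsearch_jaas.conf'   # assumed path

jaas_file = logsearch_jaas_file if security_enabled else None

call_kwargs = dict(
    zookeeper_quorum='zk1:2181,zk2:2181,zk3:2181',
    solr_znode='/infra-solr',
    config_set='hadoop_logs',
    jaas_file=jaas_file,    # stays None on non-kerberized clusters
    retry=30,
    interval=5,
)
# solr_cloud_util.upload_configuration_to_zk(tmp_dir=..., java64_home=..., **call_kwargs)
print('kerberized' if call_kwargs['jaas_file'] else 'simple auth')
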
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch_solr.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch_solr.py
deleted file mode 100644
index 42f34a0..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch_solr.py
+++ /dev/null
@@ -1,129 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management.core.exceptions import Fail
-from resource_management.core.source import InlineTemplate, Template
-from resource_management.core.resources.system import Directory, File
-from resource_management.libraries.functions.decorator import retry
-from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import solr_cloud_util
-
-
-def setup_logsearch_solr(name = None):
-  import params
-
-  if name == 'server':
-    Directory([params.logsearch_solr_log_dir, params.logsearch_solr_piddir,
-               params.logsearch_solr_datadir, params.logsearch_solr_data_resources_dir],
-              mode=0755,
-              cd_access='a',
-              create_parents=True,
-              owner=params.logsearch_solr_user,
-              group=params.user_group
-              )
-
-    Directory([params.solr_dir, params.logsearch_solr_conf],
-              mode=0755,
-              cd_access='a',
-              owner=params.logsearch_solr_user,
-              group=params.user_group,
-              create_parents=True,
-              recursive_ownership=True
-              )
-
-    File(params.logsearch_solr_log,
-         mode=0644,
-         owner=params.logsearch_solr_user,
-         group=params.user_group,
-         content=''
-         )
-
-    File(format("{logsearch_solr_conf}/logsearch-solr-env.sh"),
-         content=InlineTemplate(params.solr_env_content),
-         mode=0755,
-         owner=params.logsearch_solr_user,
-         group=params.user_group
-         )
-
-    File(format("{logsearch_solr_datadir}/solr.xml"),
-         content=InlineTemplate(params.solr_xml_content),
-         owner=params.logsearch_solr_user,
-         group=params.user_group
-         )
-
-    File(format("{logsearch_solr_conf}/log4j.properties"),
-         content=InlineTemplate(params.solr_log4j_content),
-         owner=params.logsearch_solr_user,
-         group=params.user_group
-         )
-
-    File(format("{logsearch_solr_datadir}/zoo.cfg"),
-         content=Template("zoo.cfg.j2"),
-         owner=params.logsearch_solr_user,
-         group=params.user_group
-         )
-
-    jaas_file = params.logsearch_solr_jaas_file if params.security_enabled else None
-    url_scheme = 'https' if params.logsearch_solr_ssl_enabled else 'http'
-
-    create_ambari_solr_znode()
-
-    if params.security_enabled:
-      File(format("{logsearch_solr_jaas_file}"),
-           content=Template("logsearch_solr_jaas.conf.j2"),
-           owner=params.logsearch_solr_user)
-
-    solr_cloud_util.set_cluster_prop(
-      zookeeper_quorum=params.zookeeper_quorum,
-      solr_znode=params.logsearch_solr_znode,
-      java64_home=params.java64_home,
-      user=params.logsearch_solr_user,
-      prop_name="urlScheme",
-      prop_value=url_scheme,
-      jaas_file=jaas_file
-    )
-
-    solr_cloud_util.setup_kerberos_plugin(
-      zookeeper_quorum=params.zookeeper_quorum,
-      solr_znode=params.logsearch_solr_znode,
-      user=params.logsearch_solr_user,
-      jaas_file=jaas_file,
-      java64_home=params.java64_home,
-      secure=params.security_enabled
-    )
-
-
-  elif name == 'client':
-    solr_cloud_util.setup_solr_client(params.config)
-    if params.security_enabled:
-      File(format("{solr_client_dir}/logsearch_solr_client_jaas.conf"),
-           content=Template("logsearch_solr_jaas.conf.j2"),
-           owner=params.logsearch_solr_user)
-
-  else :
-    raise Fail('Nor client or server were selected to install.')
-
-@retry(times=30, sleep_time=5, err_class=Fail)
-def create_ambari_solr_znode():
-  import params
-  solr_cloud_util.create_znode(
-    zookeeper_quorum=params.zookeeper_quorum,
-    solr_znode=params.logsearch_solr_znode,
-    java64_home=params.java64_home,
-    user=params.logsearch_solr_user)
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/status_params.py
index ac89105..3046452 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/status_params.py
@@ -26,10 +26,6 @@
 
 config = Script.get_config()
 
-logsearch_solr_port = config['configurations']['logsearch-solr-env']['logsearch_solr_port']
-logsearch_solr_piddir = config['configurations']['logsearch-solr-env']['logsearch_solr_pid_dir']
-logsearch_solr_pidfile = format("{logsearch_solr_piddir}/solr-{logsearch_solr_port}.pid")
-
 # logsearch pid file
 logsearch_pid_dir = config['configurations']['logsearch-env']['logsearch_pid_dir']
 logsearch_pid_file = format("{logsearch_pid_dir}/logsearch.pid")
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
index c575cff..9ce3fde 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
@@ -238,6 +238,16 @@
       ]
     },
 {% endif %}
+    "ambari_infra" : {
+      "label": "Infra",
+      "components" : [
+        {
+          "name": "infra_solr"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
 {% if "KNOX" in availableServices %}
     "knox": {
       "label": "Knox",
@@ -300,9 +310,6 @@
         },
         {
           "name": "logsearch_perf"
-        },
-        {
-          "name": "logsearch_solr"
         }
       ],
       "dependencies": [
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2
new file mode 100644
index 0000000..d2b9ce5
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2
@@ -0,0 +1,56 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"infra_solr",
+      "rowtype":"service",
+      "path":"{{infra_solr_log_dir}}/solr.log"
+    }
+
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "infra_solr"
+          ]
+
+        }
+
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+
+        }
+
+      }
+
+    }
+
+  ]
+
+}
\ No newline at end of file
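
The grok filter above turns Infra Solr's log4j lines into logtime, thread_name, level and log_message fields. A rough Python approximation of that pattern, where the regex is a hand-written stand-in for the TIMESTAMP_ISO8601, DATA, LOGLEVEL and GREEDYDATA macros and the sample line is invented; Logfeeder performs the real parsing:

# Rough illustration only: approximate translation of the message_pattern above.
import re
from datetime import datetime

LINE_RE = re.compile(
    r'^(?P<logtime>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3})\s+'
    r'\[(?P<thread_name>[^\]]*)\]\s+'
    r'(?P<level>[A-Z]+)\s+'
    r'(?P<log_message>.*)$', re.S)

sample = '2016-07-20 12:34:56,789 [qtp1450821318-21] INFO  org.apache.solr.core.SolrCore Opening new SolrCore'
fields = LINE_RE.match(sample).groupdict()
# post_map_values / map_date with "yyyy-MM-dd HH:mm:ss,SSS"; %f reads the
# trailing digits as a fraction of a second, which is close enough here.
fields['logtime'] = datetime.strptime(fields['logtime'], '%Y-%m-%d %H:%M:%S,%f')
print(fields['level'], fields['thread_name'], fields['logtime'])
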
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
index 31c37bf..ea91405 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
@@ -31,13 +31,7 @@
       "type":"logsearch_perf",
       "rowtype":"service",
       "path":"{{logsearch_log_dir}}/logsearch-performance.json"
-    },
-    {
-      "type":"logsearch_solr",
-      "rowtype":"service",
-      "path":"{{logsearch_solr_log_dir}}/solr.log"
     }
-
   ],
   "filter":[
     {
@@ -54,31 +48,6 @@
 
       }
 
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "logsearch_solr"
-          ]
-
-        }
-
-      },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
     }
 
   ]
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2
index 529fa8f..86f0be4 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2
@@ -19,7 +19,7 @@
 logfeeder.log.filter.enable={{logfeeder_log_filter_enable}}
 logfeeder.solr.config.interval={{logfeeder_solr_config_interval}}
 logfeeder.solr.core.config.name=history
-logfeeder.solr.zk_connect_string={{zookeeper_quorum}}{{logsearch_solr_znode}}
+logfeeder.solr.zk_connect_string={{zookeeper_quorum}}{{infra_solr_znode}}
 
 # Custom properties
 {% for key, value in logfeeder_custom_properties.items() %}
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
index eaa0a90..1bcee2d 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-logsearch.solr.zk_connect_string={{zookeeper_quorum}}{{logsearch_solr_znode}}
+logsearch.solr.zk_connect_string={{zookeeper_quorum}}{{infra_solr_znode}}
 
 # Service Logs
 logsearch.solr.collection.service.logs={{logsearch_solr_collection_service_logs}}
@@ -27,7 +27,7 @@
 # Audit logs
 logsearch.solr.audit.logs.zk_connect_string={{logsearch_solr_audit_logs_zk_quorum}}{{logsearch_solr_audit_logs_zk_node}}
 logsearch.solr.collection.audit.logs={{logsearch_solr_collection_audit_logs}}
-logsearch.solr.audit.logs.url={{logsearch_solr_audit_logs_url}}
+logsearch.solr.audit.logs.url=
 
 logsearch.audit.logs.split.interval.mins={{logsearch_audit_logs_split_interval_mins}}
 logsearch.collection.audit.logs.numshards={{logsearch_collection_audit_logs_numshards}}
@@ -40,7 +40,7 @@
 
 # Metrics
 logsearch.solr.metrics.collector.hosts={{logsearch_solr_metrics_collector_hosts}}
-logsearch.solr.jmx.port={{logsearch_solr_jmx_port}}
+logsearch.solr.jmx.port={{infra_solr_jmx_port}}
 
 # Logfeeder Settings
 
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2
index ba8df00..ff43323 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2
@@ -18,10 +18,10 @@
 {
   "output":[
     {
-      "is_enabled":"{{solr_service_logs_enable}}",
       "comment":"Output to solr for service logs",
+      "is_enabled":"{{solr_service_logs_enable}}",
       "destination":"solr",
-      "zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
+      "zk_connect_string":"{{zookeeper_quorum}}{{infra_solr_znode}}",
       "collection":"{{logsearch_solr_collection_service_logs}}",
       "number_of_shards": "{{logsearch_collection_service_logs_numshards}}",
       "splits_interval_mins": "{{logsearch_service_logs_split_interval_mins}}",
@@ -40,7 +40,7 @@
       "comment":"Output to solr for audit records",
       "is_enabled":"{{solr_audit_logs_enable}}",
       "destination":"solr",
-      "zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
+      "zk_connect_string":"{{zookeeper_quorum}}{{infra_solr_znode}}",
       "collection":"{{logsearch_solr_collection_audit_logs}}",
       "number_of_shards": "{{logsearch_collection_audit_logs_numshards}}",
       "splits_interval_mins": "{{logsearch_audit_logs_split_interval_mins}}",
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/zoo.cfg.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/zoo.cfg.j2
deleted file mode 100644
index 4136f2a..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/zoo.cfg.j2
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-{% for key, value in zoo_cfg_properties_map.iteritems() -%}
-  {{key}}={{value}}
-{% endfor %}
-{% for host in zookeeper_hosts_list -%}
-server.{{loop.index}}={{host}}:2888:3888
-{% endfor %}
-
-{% if security_enabled -%}
-authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
-jaasLoginRenew=3600000
-kerberos.removeHostFromPrincipal=true
-kerberos.removeRealmFromPrincipal=true
-{% endif %}
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-env.sh.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-env.sh.j2
index 6795dab..2818708 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-env.sh.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-env.sh.j2
@@ -32,7 +32,7 @@
   export LOGFEEDER_JAVA_MEM=-Xmx{{logfeeder_max_mem}}
 fi
 
-{% if logsearch_solr_ssl_enabled %}
+{% if infra_solr_ssl_enabled %}
 export LOGFEEDER_SSL="true"
 export LOGFEEDER_KEYSTORE_LOCATION={{logfeeder_keystore_location}}
 export LOGFEEDER_KEYSTORE_PASSWORD={{logfeeder_keystore_password}}
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-env.sh.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-env.sh.j2
index 17967a9..501603a 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-env.sh.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-env.sh.j2
@@ -38,7 +38,7 @@
 
 export LOGSEARCH_DEBUG_PORT={{logsearch_debug_port}}
 
-{% if logsearch_solr_ssl_enabled or logsearch_ui_protocol == 'https' %}
+{% if infra_solr_ssl_enabled or logsearch_ui_protocol == 'https' %}
 export LOGSEARCH_SSL="true"
 export LOGSEARCH_KEYSTORE_LOCATION={{logsearch_keystore_location}}
 export LOGSEARCH_KEYSTORE_PASSWORD={{logsearch_keystore_password}}
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/themes/theme.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/themes/theme.json
index d8de082..2ed6474 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/themes/theme.json
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/themes/theme.json
@@ -14,25 +14,6 @@
               "tab-rows": "1",
               "sections": [
                 {
-                  "name": "section-logsearch-solr",
-                  "display-name": "Log Search Solr",
-                  "row-index": "0",
-                  "column-index": "0",
-                  "row-span": "1",
-                  "column-span": "1",
-                  "section-columns": "1",
-                  "section-rows": "4",
-                  "subsections": [
-                    {
-                      "name": "subsection-logsearch-solr-col1",
-                      "row-index": "0",
-                      "column-index": "0",
-                      "row-span": "1",
-                      "column-span": "1"
-                    }
-                  ]
-                },
-                {
                   "name": "section-logsearch-server",
                   "display-name": "Log Search Server",
                   "row-index": "0",
@@ -48,6 +29,13 @@
                       "column-index": "0",
                       "row-span": "1",
                       "column-span": "1"
+                    },
+                    {
+                      "name": "subsection-logsearch-server-col2",
+                      "row-index": "0",
+                      "column-index": "1",
+                      "row-span": "1",
+                      "column-span": "1"
                     }
                   ]
                 }
@@ -61,22 +49,6 @@
       "configuration-layout": "default",
       "configs": [
         {
-          "config": "logsearch-solr-env/logsearch_solr_minmem",
-          "subsection-name": "subsection-logsearch-solr-col1"
-        },
-        {
-          "config": "logsearch-solr-env/logsearch_solr_maxmem",
-          "subsection-name": "subsection-logsearch-solr-col1"
-        },
-        {
-          "config": "logsearch-solr-env/logsearch_solr_datadir",
-          "subsection-name": "subsection-logsearch-solr-col1"
-        },
-        {
-          "config": "logsearch-solr-env/logsearch_solr_znode",
-          "subsection-name": "subsection-logsearch-solr-col1"
-        },
-        {
           "config" : "logsearch-properties/logsearch.logfeeder.include.default.level",
           "subsection-name": "subsection-logsearch-server-col1"
         },
@@ -86,58 +58,24 @@
         },
         {
           "config": "logsearch-properties/logsearch.collection.service.logs.numshards",
-          "subsection-name": "subsection-logsearch-server-col1"
+          "subsection-name": "subsection-logsearch-server-col2"
         },
         {
           "config": "logsearch-properties/logsearch.collection.service.logs.replication.factor",
-          "subsection-name": "subsection-logsearch-server-col1"
+          "subsection-name": "subsection-logsearch-server-col2"
         },
         {
           "config": "logsearch-properties/logsearch.collection.audit.logs.numshards",
-          "subsection-name": "subsection-logsearch-server-col1"
+          "subsection-name": "subsection-logsearch-server-col2"
         },
         {
           "config": "logsearch-properties/logsearch.collection.audit.logs.replication.factor",
-          "subsection-name": "subsection-logsearch-server-col1"
+          "subsection-name": "subsection-logsearch-server-col2"
         }
       ]
     },
     "widgets": [
       {
-        "config": "logsearch-solr-env/logsearch_solr_minmem",
-        "widget": {
-          "type": "slider",
-          "units": [
-            {
-              "unit-name": "GB"
-            }
-          ]
-        }
-      },
-      {
-        "config": "logsearch-solr-env/logsearch_solr_maxmem",
-        "widget": {
-          "type": "slider",
-          "units": [
-            {
-              "unit-name": "GB"
-            }
-          ]
-        }
-      },
-      {
-        "config": "logsearch-solr-env/logsearch_solr_datadir",
-        "widget": {
-          "type": "directory"
-        }
-      },
-      {
-        "config": "logsearch-solr-env/logsearch_solr_znode",
-        "widget": {
-          "type": "directory"
-        }
-      },
-      {
         "config": "logsearch-env/logsearch_app_max_memory",
         "widget": {
           "type": "slider",
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index 3db3256..43c767d 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -68,7 +68,7 @@
 stack_supports_ranger_log4j = check_stack_feature(StackFeature.RANGER_LOG4J_SUPPORT, version_for_stack_feature_checks)
 stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
 stack_supports_usersync_passwd = check_stack_feature(StackFeature.RANGER_USERSYNC_PASSWORD_JCEKS, version_for_stack_feature_checks)
-stack_supports_logsearch_client = check_stack_feature(StackFeature.RANGER_INSTALL_LOGSEARCH_CLIENT, version_for_stack_feature_checks)
+stack_supports_infra_client = check_stack_feature(StackFeature.RANGER_INSTALL_INFRA_CLIENT, version_for_stack_feature_checks)
 stack_supports_pid = check_stack_feature(StackFeature.RANGER_PID_SUPPORT, version_for_stack_feature_checks)
 stack_supports_ranger_admin_password_change = check_stack_feature(StackFeature.RANGER_ADMIN_PASSWD_CHANGE, version_for_stack_feature_checks)
 
@@ -272,24 +272,24 @@
 ranger_solr_shards = config['configurations']['ranger-env']['ranger_solr_shards']
 replication_factor = config['configurations']['ranger-env']['ranger_solr_replication_factor']
 ranger_solr_conf = format('{ranger_home}/contrib/solr_for_audit_setup/conf')
-logsearch_solr_hosts = default("/clusterHostInfo/logsearch_solr_hosts", [])
-has_logsearch = len(logsearch_solr_hosts) > 0
+infra_solr_hosts = default("/clusterHostInfo/infra_solr_hosts", [])
+has_infra_solr = len(infra_solr_hosts) > 0
 is_solrCloud_enabled = default('/configurations/ranger-env/is_solrCloud_enabled', False)
 is_external_solrCloud_enabled = default('/configurations/ranger-env/is_external_solrCloud_enabled', False)
 solr_znode = '/ranger_audits'
-if stack_supports_logsearch_client and is_solrCloud_enabled:
+if stack_supports_infra_client and is_solrCloud_enabled:
   solr_znode = default('/configurations/ranger-admin-site/ranger.audit.solr.zookeepers', 'NONE')
   if solr_znode != '' and solr_znode.upper() != 'NONE':
     solr_znode = solr_znode.split('/')
     if len(solr_znode) > 1 and len(solr_znode) == 2:
       solr_znode = solr_znode[1]
       solr_znode = format('/{solr_znode}')
-  if has_logsearch and not is_external_solrCloud_enabled:
-    solr_znode = config['configurations']['logsearch-solr-env']['logsearch_solr_znode']
+  if has_infra_solr and not is_external_solrCloud_enabled:
+    solr_znode = config['configurations']['infra-solr-env']['infra_solr_znode']
 solr_user = unix_user
-if has_logsearch and not is_external_solrCloud_enabled:
-  solr_user = default('/configurations/logsearch-solr-env/logsearch_solr_user', unix_user)
-custom_log4j = has_logsearch and not is_external_solrCloud_enabled
+if has_infra_solr and not is_external_solrCloud_enabled:
+  solr_user = default('/configurations/infra-solr-env/infra_solr_user', unix_user)
+custom_log4j = has_infra_solr and not is_external_solrCloud_enabled
 
 # get comma separated list of zookeeper hosts
 zookeeper_port = default('/configurations/zoo.cfg/clientPort', None)
@@ -318,11 +318,11 @@
     ranger_admin_principal = config['configurations']['ranger-admin-site']['ranger.admin.kerberos.principal']
     if not is_empty(ranger_admin_principal) and ranger_admin_principal != '':
       ranger_admin_jaas_principal = ranger_admin_principal.replace('_HOST', ranger_host.lower())
-      if stack_supports_logsearch_client and is_solrCloud_enabled and is_external_solrCloud_enabled and is_external_solrCloud_kerberos:
+      if stack_supports_infra_client and is_solrCloud_enabled and is_external_solrCloud_enabled and is_external_solrCloud_kerberos:
         solr_jaas_file = format('{ranger_home}/conf/ranger_solr_jaas.conf')
         solr_kerberos_principal = ranger_admin_jaas_principal
         solr_kerberos_keytab = ranger_admin_keytab
-      if stack_supports_logsearch_client and is_solrCloud_enabled and not is_external_solrCloud_enabled and not is_external_solrCloud_kerberos:
+      if stack_supports_infra_client and is_solrCloud_enabled and not is_external_solrCloud_enabled and not is_external_solrCloud_kerberos:
         solr_jaas_file = format('{ranger_home}/conf/ranger_solr_jaas.conf')
         solr_kerberos_principal = ranger_admin_jaas_principal
         solr_kerberos_keytab = ranger_admin_keytab
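
The znode handling above keeps only the /znode suffix of ranger.audit.solr.zookeepers when exactly one is present. A standalone sketch of that normalization with invented input strings; it omits the later override with infra_solr_znode that params.py applies when Infra Solr is local and SolrCloud is not external:

def extract_znode(ranger_audit_solr_zookeepers, fallback='/ranger_audits'):
    # 'NONE' mirrors the default used when the property is unset.
    solr_znode = ranger_audit_solr_zookeepers or 'NONE'
    if solr_znode != '' and solr_znode.upper() != 'NONE':
        parts = solr_znode.split('/')
        # "len > 1 and len == 2" in the original reduces to len == 2,
        # i.e. exactly "host:port,host:port/znode".
        if len(parts) == 2:
            return '/' + parts[1]
    return fallback

print(extract_znode('c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181/infra-solr'))  # /infra-solr
print(extract_znode('NONE'))                                                                  # /ranger_audits
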
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
index c0534f3..fd6899f 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
@@ -86,8 +86,8 @@
     env.set_params(params)
     self.configure(env, upgrade_type=upgrade_type)
 
-    if params.stack_supports_logsearch_client and params.audit_solr_enabled and params.is_solrCloud_enabled:
-      solr_cloud_util.setup_solr_client(params.config, user = params.solr_user, custom_log4j = params.custom_log4j)
+    if params.stack_supports_infra_client and params.audit_solr_enabled and params.is_solrCloud_enabled:
+      solr_cloud_util.setup_solr_client(params.config, custom_log4j = params.custom_log4j)
       setup_ranger_audit_solr()
 
     ranger_service('ranger_admin')
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
index 24ac487..4e402e5 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
@@ -576,7 +576,6 @@
     config_set_dir = params.ranger_solr_conf,
     tmp_dir = params.tmp_dir,
     java64_home = params.java_home,
-    user = params.unix_user,
     jaas_file=params.solr_jaas_file,
     retry=30, interval=5)
 
@@ -586,7 +585,6 @@
     collection = params.ranger_solr_collection_name,
     config_set = params.ranger_solr_config_set,
     java64_home = params.java_home,
-    user = params.unix_user,
     shards = params.ranger_solr_shards,
     replication_factor = int(params.replication_factor),
     jaas_file = params.solr_jaas_file)
@@ -605,5 +603,4 @@
   solr_cloud_util.check_znode(
     zookeeper_quorum=params.zookeeper_quorum,
     solr_znode=params.solr_znode,
-    java64_home=params.java_home,
-    user=params.unix_user)
\ No newline at end of file
+    java64_home=params.java_home)
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/ranger-admin-site.xml b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/ranger-admin-site.xml
index c75f2fd..36ebc8c 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/ranger-admin-site.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/ranger-admin-site.xml
@@ -145,8 +145,8 @@
     <description>Solr Zookeeper string</description>
     <depends-on>
       <property>
-        <type>logsearch-solr-env</type>
-        <name>logsearch_solr_znode</name>
+        <type>infra-solr-env</type>
+        <name>infra_solr_znode</name>
       </property>
       <property>
         <type>ranger-env</type>
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/metainfo.xml b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/metainfo.xml
index 4e7458a..e571067 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/metainfo.xml
@@ -27,6 +27,18 @@
       <version>0.6.0</version>
 
       <components>
+        <component>
+          <name>RANGER_ADMIN</name>
+          <dependencies>
+            <dependency>
+              <name>AMBARI_INFRA/INFRA_SOLR_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+        </component>
 
         <component>
           <name>RANGER_TAGSYNC</name>
@@ -68,8 +80,8 @@
               <condition>should_install_ranger_tagsync</condition>
             </package>
             <package>
-              <name>ambari-logsearch-solr-client</name>
-              <condition>should_install_logsearch_solr_client</condition>
+              <name>ambari-infra-solr-client</name>
+              <condition>should_install_infra_solr_client</condition>
             </package>
           </packages>
         </osSpecific>
@@ -87,8 +99,8 @@
               <condition>should_install_ranger_tagsync</condition>
             </package>
             <package>
-              <name>ambari-logsearch-solr-client</name>
-              <condition>should_install_logsearch_solr_client</condition>
+              <name>ambari-infra-solr-client</name>
+              <condition>should_install_infra_solr_client</condition>
             </package>
           </packages>
         </osSpecific>
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
index 6d26052..b297fa6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
@@ -227,8 +227,8 @@
       "min_version": "2.5.0.0"
     },
     {
-      "name": "ranger_install_logsearch_client",
-      "description": "LogSearch Service support",
+      "name": "ranger_install_infra_client",
+      "description": "Ambari Infra Service support",
       "min_version": "2.5.0.0"
     },
     {
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/AMBARI_INFRA/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/AMBARI_INFRA/metainfo.xml
new file mode 100644
index 0000000..635d525
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/AMBARI_INFRA/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>AMBARI_INFRA</name>
+      <extends>common-services/AMBARI_INFRA/0.1.0</extends>
+    </service>
+  </services>
+</metainfo>
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/AMBARI_INFRA/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/AMBARI_INFRA/role_command_order.json
new file mode 100755
index 0000000..34654c2
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/AMBARI_INFRA/role_command_order.json
@@ -0,0 +1,7 @@
+{
+  "general_deps" : {
+    "_comment" : "dependencies for ambari infra",
+    "INFRA_SOLR-START" : ["ZOOKEEPER_SERVER-START"],
+    "AMBARI_INFRA_CHECK-SERVICE_CHECK": ["INFRA_SOLR-START"]
+  }
+}
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/LOGSEARCH/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.2/services/LOGSEARCH/role_command_order.json
index 705fd10..e294e1b 100755
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/LOGSEARCH/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/LOGSEARCH/role_command_order.json
@@ -1,10 +1,9 @@
 {
   "general_deps" : {
     "_comment" : "dependencies for logsearch",
-    "LOGSEARCH_SOLR-START" : ["ZOOKEEPER_SERVER-START"],
-    "LOGSEARCH_SERVER-START": ["LOGSEARCH_SOLR-START"],
-    "LOGSEARCH_LOGFEEDER-START": ["LOGSEARCH_SOLR-START", "LOGSEARCH_SERVER-START"],
-    "LOGSEARCH_SERVICE_CHECK-SERVICE_CHECK": ["LOGSEARCH_SOLR-START", "LOGSEARCH_SERVER-START"],
-    "LOGSEARCH_SERVER-RESTART" : ["LOGSEARCH_SOLR-RESTART"]
+    "LOGSEARCH_SERVER-START": ["INFRA_SOLR-START", "ZOOKEEPER_SERVER-START"],
+    "LOGSEARCH_LOGFEEDER-START": ["INFRA_SOLR-START", "LOGSEARCH_SERVER-START"],
+    "LOGSEARCH_SERVICE_CHECK-SERVICE_CHECK": ["INFRA_SOLR-START", "LOGSEARCH_SERVER-START"],
+    "LOGSEARCH_SERVER-RESTART" : ["INFRA_SOLR-RESTART"]
   }
 }
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index a5d386e..ea62eb7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -997,13 +997,13 @@
 
   def recommendLogsearchConfigurations(self, configurations, clusterData, services, hosts):
     putLogsearchProperty = self.putProperty(configurations, "logsearch-properties", services)
-    logsearchSolrHosts = self.getComponentHostNames(services, "LOGSEARCH", "LOGSEARCH_SOLR")
+    infraSolrHosts = self.getComponentHostNames(services, "AMBARI_INFRA", "INFRA_SOLR")
 
-    if logsearchSolrHosts is not None and len(logsearchSolrHosts) > 0 \
+    if infraSolrHosts is not None and len(infraSolrHosts) > 0 \
       and "logsearch-properties" in services["configurations"]:
-      recommendedMinShards = len(logsearchSolrHosts)
-      recommendedShards = 2 * len(logsearchSolrHosts)
-      recommendedMaxShards = 3 * len(logsearchSolrHosts)
+      recommendedMinShards = len(infraSolrHosts)
+      recommendedShards = 2 * len(infraSolrHosts)
+      recommendedMaxShards = 3 * len(infraSolrHosts)
       # recommend number of shard
       putLogsearchAttribute = self.putPropertyAttribute(configurations, "logsearch-properties")
       putLogsearchAttribute('logsearch.collection.service.logs.numshards', 'minimum', recommendedMinShards)
@@ -1014,7 +1014,7 @@
       putLogsearchAttribute('logsearch.collection.audit.logs.numshards', 'maximum', recommendedMaxShards)
       putLogsearchProperty("logsearch.collection.audit.logs.numshards", recommendedShards)
       # recommend replication factor
-      replicationReccomendFloat = math.log(len(logsearchSolrHosts), 5)
+      replicationReccomendFloat = math.log(len(infraSolrHosts), 5)
       recommendedReplicationFactor = int(1 + math.floor(replicationReccomendFloat))
       putLogsearchProperty("logsearch.collection.service.logs.replication.factor", recommendedReplicationFactor)
       putLogsearchProperty("logsearch.collection.audit.logs.replication.factor", recommendedReplicationFactor)
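
The recommendations above reduce to simple arithmetic on the Infra Solr host count: the minimum shard count equals the host count, the recommended value is twice it, the maximum three times it, and the replication factor is 1 + floor(log base 5 of the host count). A short sketch of that math with invented host counts:

import math

def recommend(infra_solr_host_count):
    min_shards = infra_solr_host_count
    shards = 2 * infra_solr_host_count
    max_shards = 3 * infra_solr_host_count
    # 1 + floor(log base 5 of the host count), as in the stack advisor
    replication_factor = int(1 + math.floor(math.log(infra_solr_host_count, 5)))
    return min_shards, shards, max_shards, replication_factor

print(recommend(3))   # (3, 6, 9, 1)
print(recommend(5))   # (5, 10, 15, 2)
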
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json
index 7ac26cd..edd6d64 100755
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json
@@ -9,7 +9,7 @@
     "RANGER_KMS_SERVICE_CHECK-SERVICE_CHECK" : ["RANGER_KMS_SERVER-START"],
     "PHOENIX_QUERY_SERVER-START": ["HBASE_MASTER-START"],
     "ATLAS_SERVICE_CHECK-SERVICE_CHECK": ["ATLAS_SERVER-START"],
-    "ATLAS_SERVER-START": ["KAFKA_BROKER-START", "LOGSEARCH_SOLR-START", "HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
+    "ATLAS_SERVER-START": ["KAFKA_BROKER-START", "INFRA_SOLR-START", "HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
     "SPARK_THRIFTSERVER-START" : ["NAMENODE-START", "HIVE_METASTORE-START"],
     "RESOURCEMANAGER-STOP" : ["SPARK_THRIFTSERVER-STOP"],
     "HAWQMASTER-START" : ["NAMENODE-START", "DATANODE-START", "NODEMANAGER-START"],
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
index 1f1716d..0e1319a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
@@ -11,7 +11,7 @@
     "NAMENODE-STOP": ["HIVE_SERVER_INTERACTIVE-STOP"],
     "HIVE_SERVER_INTERACTIVE-RESTART": ["NODEMANAGER-RESTART", "MYSQL_SERVER-RESTART"],
     "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START", "HIVE_SERVER_INTERACTIVE-START"],
-    "RANGER_ADMIN-START": ["ZOOKEEPER_SERVER-START", "LOGSEARCH_SOLR-START"],
+    "RANGER_ADMIN-START": ["ZOOKEEPER_SERVER-START", "INFRA_SOLR-START"],
     "LIVY_SERVER-START" : ["NAMENODE-START", "DATANODE-START", "APP_TIMELINE_SERVER-START"],
     "SPARK_SERVICE_CHECK-SERVICE_CHECK" : ["SPARK_JOBHISTORYSERVER-START", "LIVY_SERVER-START"],
     "SPARK2_SERVICE_CHECK-SERVICE_CHECK" : ["SPARK2_JOBHISTORYSERVER-START", "APP_TIMELINE_SERVER-START"],
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/configuration/atlas-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/configuration/atlas-env.xml
index 5fbe9ec..bd0da9f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/configuration/atlas-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/configuration/atlas-env.xml
@@ -91,7 +91,7 @@
   <property>
     <name>atlas_solr_shards</name>
     <value>1</value>
-    <description>The number of shards set for LogSearch SOLR.</description>
+    <description>The number of shards set for Infra SOLR.</description>
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml
index 0183f06..4ba59d5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml
@@ -30,8 +30,8 @@
               <name>atlas-metadata_${stack_version}</name>
             </package>
             <package>
-              <name>ambari-logsearch-solr-client</name>
-              <condition>should_install_logsearch_solr_client</condition>
+              <name>ambari-infra-solr-client</name>
+              <condition>should_install_infra_solr_client</condition>
             </package>
           </packages>
         </osSpecific>
@@ -42,14 +42,14 @@
               <name>atlas-metadata-${stack_version}</name>
             </package>
             <package>
-              <name>ambari-logsearch-solr-client</name>
-              <condition>should_install_logsearch_solr_client</condition>
+              <name>ambari-infra-solr-client</name>
+              <condition>should_install_infra_solr_client</condition>
             </package>
           </packages>
         </osSpecific>
       </osSpecifics>
 
-<configuration-dependencies>
+      <configuration-dependencies>
         <config-type>ranger-atlas-audit</config-type>
         <config-type>ranger-atlas-plugin-properties</config-type>
         <config-type>ranger-atlas-policymgr-ssl</config-type>
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index fc9bd94..69d6632 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -162,7 +162,7 @@
             not application_properties['atlas.graph.index.search.solr.zookeeper-url']:
       validationItems.append({"config-name": "atlas.graph.index.search.solr.zookeeper-url",
                               "item": self.getErrorItem(
-                                  "If LOGSEARCH is not installed then the SOLR zookeeper url configuration must be specified.")})
+                                  "If AMBARI_INFRA is not installed then the SOLR zookeeper url configuration must be specified.")})
 
     if not application_properties['atlas.kafka.bootstrap.servers']:
       validationItems.append({"config-name": "atlas.kafka.bootstrap.servers",
@@ -489,12 +489,11 @@
     if atlas_rest_address is not None:
       putAtlasApplicationProperty("atlas.rest.address", atlas_rest_address)
 
-    if "LOGSEARCH" in servicesList and 'logsearch-solr-env' in services['configurations']:
-
-      if 'logsearch_solr_znode' in services['configurations']['logsearch-solr-env']['properties']:
-        logsearch_solr_znode = services['configurations']['logsearch-solr-env']['properties']['logsearch_solr_znode']
+    if "AMBARI_INFRA" in servicesList and 'infra-solr-env' in services['configurations']:
+      if 'infra_solr_znode' in services['configurations']['infra-solr-env']['properties']:
+        infra_solr_znode = services['configurations']['infra-solr-env']['properties']['infra_solr_znode']
       else:
-        logsearch_solr_znode = None
+        infra_solr_znode = None
 
       zookeeper_hosts = self.getHostNamesWithComponent("ZOOKEEPER", "ZOOKEEPER_SERVER", services)
       zookeeper_host_arr = []
@@ -502,8 +501,8 @@
       zookeeper_port = self.getZKPort(services)
       for i in range(len(zookeeper_hosts)):
         zookeeper_host = zookeeper_hosts[i] + ':' + zookeeper_port
-        if logsearch_solr_znode is not None:
-          zookeeper_host += logsearch_solr_znode
+        if infra_solr_znode is not None:
+          zookeeper_host += infra_solr_znode
         zookeeper_host_arr.append(zookeeper_host)
 
       solr_zookeeper_url = ",".join(zookeeper_host_arr)
@@ -1734,17 +1733,16 @@
 
     ranger_audit_zk_port = ''
 
-    #TODO to change check for LOGSEARCH after implemenation of AMBARI-17822
-    if 'LOGSEARCH' in servicesList and zookeeper_host_port and is_solr_cloud_enabled and not is_external_solr_cloud_enabled:
+    if 'AMBARI_INFRA' in servicesList and zookeeper_host_port and is_solr_cloud_enabled and not is_external_solr_cloud_enabled:
       zookeeper_host_port = zookeeper_host_port.split(',')
       zookeeper_host_port.sort()
       zookeeper_host_port = ",".join(zookeeper_host_port)
-      logsearch_solr_znode = '/ambari-solr'
+      infra_solr_znode = '/infra-solr'
 
-      if 'logsearch-solr-env' in services['configurations'] and \
-        ('logsearch_solr_znode' in services['configurations']['logsearch-solr-env']['properties']):
-        logsearch_solr_znode = services['configurations']['logsearch-solr-env']['properties']['logsearch_solr_znode']
-        ranger_audit_zk_port = '{0}{1}'.format(zookeeper_host_port, logsearch_solr_znode)
+      if 'infra-solr-env' in services['configurations'] and \
+        ('infra_solr_znode' in services['configurations']['infra-solr-env']['properties']):
+        infra_solr_znode = services['configurations']['infra-solr-env']['properties']['infra_solr_znode']
+        ranger_audit_zk_port = '{0}{1}'.format(zookeeper_host_port, infra_solr_znode)
       putRangerAdminProperty('ranger.audit.solr.zookeepers', ranger_audit_zk_port)
     elif zookeeper_host_port and is_solr_cloud_enabled and is_external_solr_cloud_enabled:
       ranger_audit_zk_port = '{0}/{1}'.format(zookeeper_host_port, 'ranger_audits')
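
Both recommendation paths above build a ZooKeeper connect string and attach the Infra Solr znode: the Atlas path appends the znode to every host:port entry, while the Ranger path sorts and joins the host:port list and appends the znode once. A sketch of both with invented host names; in the stack advisor the hosts, port and znode come from the cluster topology and infra-solr-env:

zookeeper_hosts = ['c6401.ambari.apache.org', 'c6403.ambari.apache.org', 'c6402.ambari.apache.org']
zookeeper_port = '2181'
infra_solr_znode = '/infra-solr'

# Atlas-style URL: znode appended to every host:port entry.
solr_zookeeper_url = ",".join('{0}:{1}{2}'.format(h, zookeeper_port, infra_solr_znode) for h in zookeeper_hosts)

# Ranger-style value: host:port list sorted and comma-joined, znode appended once.
zookeeper_host_port = ",".join(sorted('{0}:{1}'.format(h, zookeeper_port) for h in zookeeper_hosts))
ranger_audit_zk_port = '{0}{1}'.format(zookeeper_host_port, infra_solr_znode)

print(solr_zookeeper_url)
print(ranger_audit_zk_port)
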
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
index 6ac3ea0..8263f72 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
@@ -942,7 +942,7 @@
 
     String zookeeperServerRoleCommand = Role.ZOOKEEPER_SERVER + "-" + RoleCommand.START;
     String logsearchServerRoleCommand = Role.LOGSEARCH_SERVER + "-" + RoleCommand.START;
-    String logsearchSolrRoleCommand = Role.LOGSEARCH_SOLR + "-" + RoleCommand.START;
+    String infraSolrRoleCommand = Role.INFRA_SOLR + "-" + RoleCommand.START;
     String logsearchLogfeederRoleCommand = Role.LOGSEARCH_LOGFEEDER + "-" + RoleCommand.START;
 
     StackInfo hdp = stackManager.getStack("HDP", "2.3");
@@ -965,16 +965,16 @@
 
     // verify logsearch rco
     // LogSearch Solr
-    ArrayList<String> logsearchSolrBlockers = (ArrayList<String>) generalDeps.get(logsearchSolrRoleCommand);
-    assertTrue(logsearchSolrRoleCommand + " should be dependent of " + zookeeperServerRoleCommand, logsearchSolrBlockers.contains(zookeeperServerRoleCommand));
+    ArrayList<String> logsearchSolrBlockers = (ArrayList<String>) generalDeps.get(infraSolrRoleCommand);
+    assertTrue(infraSolrRoleCommand + " should be dependent of " + zookeeperServerRoleCommand, logsearchSolrBlockers.contains(zookeeperServerRoleCommand));
 
     // LogSearch Server
     ArrayList<String> logsearchServerBlockers = (ArrayList<String>) generalDeps.get(logsearchServerRoleCommand);
-    assertTrue(logsearchServerRoleCommand + " should be dependent of " + logsearchSolrRoleCommand, logsearchServerBlockers.contains(logsearchSolrRoleCommand));
+    assertTrue(logsearchServerRoleCommand + " should be dependent of " + infraSolrRoleCommand, logsearchServerBlockers.contains(infraSolrRoleCommand));
 
     // LogSearch LogFeeder
     ArrayList<String> logsearchLogfeederBlockers = (ArrayList<String>) generalDeps.get(logsearchLogfeederRoleCommand);
-    assertTrue(logsearchLogfeederRoleCommand + " should be dependent of " + logsearchSolrRoleCommand, logsearchLogfeederBlockers.contains(logsearchSolrRoleCommand));
+    assertTrue(logsearchLogfeederRoleCommand + " should be dependent of " + infraSolrRoleCommand, logsearchLogfeederBlockers.contains(infraSolrRoleCommand));
     assertTrue(logsearchLogfeederRoleCommand + " should be dependent of " + logsearchServerRoleCommand, logsearchLogfeederBlockers.contains(logsearchServerRoleCommand));
   }
 }
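
The StackManagerTest assertions above encode the renamed role-command ordering: INFRA_SOLR start is blocked on ZooKeeper, and both Log Search roles are blocked on INFRA_SOLR. An illustrative sketch of that dependency map; the entries below come from the test assertions, not from the stack's role_command_order.json:

expected_general_deps = {
    "INFRA_SOLR-START": ["ZOOKEEPER_SERVER-START"],
    "LOGSEARCH_SERVER-START": ["INFRA_SOLR-START"],
    "LOGSEARCH_LOGFEEDER-START": ["INFRA_SOLR-START", "LOGSEARCH_SERVER-START"],
}

def blockers(role_command):
    # Role commands that must complete before role_command can start.
    return expected_general_deps.get(role_command, [])

assert "ZOOKEEPER_SERVER-START" in blockers("INFRA_SOLR-START")
assert "INFRA_SOLR-START" in blockers("LOGSEARCH_SERVER-START")
assert "LOGSEARCH_SERVER-START" in blockers("LOGSEARCH_LOGFEEDER-START")
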
diff --git a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
index 9ee6c7b..44841f4 100644
--- a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
@@ -126,58 +126,46 @@
                                 group=u'hadoop',
                                 mode=0644,
       )
-      self.assertResourceCalled('Directory', '/var/log/ambari-logsearch-solr-client',
-                                owner='solr',
-                                group='hadoop',
+      self.assertResourceCalled('Directory', '/var/log/ambari-infra-solr-client',
                                 create_parents = True,
                                 cd_access='a',
                                 mode=0755
       )
-      self.assertResourceCalled('Directory', '/usr/lib/ambari-logsearch-solr-client',
-                                owner='solr',
-                                group='hadoop',
+      self.assertResourceCalled('Directory', '/usr/lib/ambari-infra-solr-client',
                                 create_parents = True,
                                 recursive_ownership = True,
                                 cd_access='a',
                                 mode=0755
       )
-      self.assertResourceCalled('File', '/usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh',
-                                content=StaticFile('/usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh'),
-                                owner='solr',
-                                group='hadoop',
+      self.assertResourceCalled('File', '/usr/lib/ambari-infra-solr-client/solrCloudCli.sh',
+                                content=StaticFile('/usr/lib/ambari-infra-solr-client/solrCloudCli.sh'),
                                 mode=0755,
                                 )
-      self.assertResourceCalled('File', '/usr/lib/ambari-logsearch-solr-client/log4j.properties',
+      self.assertResourceCalled('File', '/usr/lib/ambari-infra-solr-client/log4j.properties',
                                 content=InlineTemplate(self.getConfig()['configurations'][
-                                    'logsearch-solr-client-log4j']['content']),
-                                owner='solr',
-                                group='hadoop',
+                                    'infra-solr-client-log4j']['content']),
                                 mode=0644,
       )
-      self.assertResourceCalled('File', '/var/log/ambari-logsearch-solr-client/solr-client.log',
-                                owner='solr',
-                                group='hadoop',
+      self.assertResourceCalled('File', '/var/log/ambari-infra-solr-client/solr-client.log',
                                 mode=0664,
                                 content=''
       )
-      self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /logsearch --check-znode --retry 5 --interval 10')
-      self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --download-config --config-dir /tmp/solr_config_basic_configs_0.[0-9]* --config-set basic_configs --retry 30 --interval 5')
+      self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --check-znode --retry 5 --interval 10')
+      self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --download-config --config-dir /tmp/solr_config_basic_configs_0.[0-9]* --config-set basic_configs --retry 30 --interval 5')
       self.assertResourceCalledRegexp('^File$', '^/tmp/solr_config_basic_configs_0.[0-9]*',
                                       content=InlineTemplate(self.getConfig()['configurations']['atlas-solrconfig']['content']),
-                                      owner='atlas',
                                       only_if='test -d /tmp/solr_config_basic_configs_0.[0-9]*')
-      self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --upload-config --config-dir /tmp/solr_config_basic_configs_0.[0-9]* --config-set basic_configs --retry 30 --interval 5',
+      self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /tmp/solr_config_basic_configs_0.[0-9]* --config-set basic_configs --retry 30 --interval 5',
                                       only_if='test -d /tmp/solr_config_basic_configs_0.[0-9]*')
-      self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --upload-config --config-dir /etc/atlas/conf/solr --config-set basic_configs --retry 30 --interval 5',
+      self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /etc/atlas/conf/solr --config-set basic_configs --retry 30 --interval 5',
                                       not_if='test -d /tmp/solr_config_basic_configs_0.[0-9]*')
       self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_basic_configs_0.[0-9]*',
                                       action=['delete'],
-                                      owner='atlas',
                                       create_parents=True)
 
-      self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --create-collection --collection vertex_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
-      self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --create-collection --collection edge_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
-      self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --create-collection --collection fulltext_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+      self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection vertex_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+      self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection edge_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+      self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection fulltext_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
 
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/metadata_server.py",
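
The Atlas test expectations above drop the solr owner/group on the client resources and replace every exported-JAVA_HOME Execute with a single ambari-sudo.sh invocation that passes JAVA_HOME inline. A sketch of a hypothetical helper that formats the command strings those assertions expect (expected_solr_cli does not exist in the patch; it only illustrates the asserted shape):

def expected_solr_cli(java_home, zk_connect, *cli_args):
    # Builds the command-line shape asserted by the Execute regexps above.
    return ('ambari-sudo.sh JAVA_HOME=%s '
            '/usr/lib/ambari-infra-solr-client/solrCloudCli.sh '
            '--zookeeper-connect-string %s %s') % (java_home, zk_connect, ' '.join(cli_args))

# Matches the --check-znode expectation in the test above.
print(expected_solr_cli('/usr/jdk64/jdk1.7.0_45', 'c6401.ambari.apache.org:2181',
                        '--znode', '/infra-solr', '--check-znode',
                        '--retry', '5', '--interval', '10'))
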
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index 15c19d1..f243f15 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -1991,9 +1991,9 @@
     services = {
       "services": [
         {
-          "href": "/api/v1/stacks/HDP/versions/2.3/services/LOGSEARCH",
+          "href": "/api/v1/stacks/HDP/versions/2.3/services/AMBARI_INFRA",
           "StackServices": {
-            "service_name": "LOGSEARCH",
+            "service_name": "AMBARI_INFRA",
             "service_version": "2.6.0.2.2",
             "stack_name": "HDP",
             "stack_version": "2.3"
@@ -2004,8 +2004,8 @@
                 "advertise_version": "false",
                 "cardinality": "1",
                 "component_category": "MASTER",
-                "component_name": "LOGSEARCH_SOLR",
-                "display_name": "Log Search Solr Instance",
+                "component_name": "INFRA_SOLR",
+                "display_name": "Infra Solr Instance",
                 "is_client": "false",
                 "is_master": "true",
                 "hostnames": []
diff --git a/ambari-server/src/test/python/stacks/2.3/configs/default.json b/ambari-server/src/test/python/stacks/2.3/configs/default.json
index 9764a47..9666c31 100644
--- a/ambari-server/src/test/python/stacks/2.3/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.3/configs/default.json
@@ -171,13 +171,14 @@
         "log.index.interval.bytes": "4096",
         "log.retention.hours": "168"
       },
-      "logsearch-solr-env": {
-        "logsearch_solr_znode": "/logsearch",
-        "logsearch_solr_user": "solr",
-        "logsearch_solr_group": "solr",
-        "logsearch_solr_client_log_dir" :"/var/log/ambari-logsearch-solr-client"
+      "infra-solr-env": {
+        "infra_solr_znode": "/infra-solr",
+        "infra_solr_user": "solr",
+        "infra_solr_group": "solr",
+        "infra_solr_client_log_dir" :"/var/log/ambari-infra-solr-client"
       },
-      "logsearch-solr-client-log4j" : {
+      "infra-solr-client-log4j" : {
+        "infra_solr_client_log_dir" : "/var/log/ambari-infra-solr-client",
         "content" : "content"
       },
       "application-properties": {
@@ -341,7 +342,7 @@
         "kafka_broker_hosts": [
           "c6401.ambari.apache.org"
         ],
-        "logsearch_solr_hosts": [
+        "infra_solr_hosts": [
           "c6401.ambari.apache.org"
         ],
         "zookeeper_hosts": [
diff --git a/ambari-server/src/test/python/stacks/2.3/configs/secure.json b/ambari-server/src/test/python/stacks/2.3/configs/secure.json
index 2545505..fe4ca92 100644
--- a/ambari-server/src/test/python/stacks/2.3/configs/secure.json
+++ b/ambari-server/src/test/python/stacks/2.3/configs/secure.json
@@ -162,13 +162,14 @@
       "log.index.interval.bytes": "4096",
       "log.retention.hours": "168"
     },
-    "logsearch-solr-env": {
-      "logsearch_solr_znode": "/logsearch",
-      "logsearch_solr_user": "solr",
-      "logsearch_solr_group": "solr",
-      "logsearch_solr_client_log_dir" :"/var/log/ambari-logsearch-solr-client"
+    "infra-solr-env": {
+      "infra_solr_znode": "/infra-solr",
+      "infra_solr_user": "solr",
+      "infra_solr_group": "solr",
+      "infra_solr_client_log_dir" :"/var/log/ambari-infra-solr-client"
     },
-    "logsearch-solr-client-log4j" : {
+    "infra-solr-client-log4j" : {
+      "infra_solr_client_log_dir" : "/var/log/ambari-infra-solr-client",
       "content" : "content"
     },
     "application-properties": {
@@ -333,7 +334,7 @@
     "kafka_broker_hosts": [
       "c6401.ambari.apache.org"
     ],
-    "logsearch_solr_hosts": [
+    "infra_solr_hosts": [
       "c6401.ambari.apache.org"
     ],
     "zookeeper_hosts": [
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_solr.py b/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
similarity index 61%
rename from ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_solr.py
rename to ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
index 6319be8..005283f 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_solr.py
+++ b/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
@@ -22,40 +22,40 @@
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.libraries.script.config_dictionary import UnknownConfiguration
 
-class TestSolr(RMFTestCase):
-  COMMON_SERVICES_PACKAGE_DIR = "LOGSEARCH/0.5.0/package"
+class TestInfraSolr(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "AMBARI_INFRA/0.1.0/package"
   STACK_VERSION = "2.4"
 
   def configureResourcesCalled(self):
-      self.assertResourceCalled('Directory', '/var/log/ambari-logsearch-solr',
+      self.assertResourceCalled('Directory', '/var/log/ambari-infra-solr',
                                 owner = 'solr',
                                 group = 'hadoop',
                                 create_parents = True,
                                 cd_access = 'a',
                                 mode = 0755
       )
-      self.assertResourceCalled('Directory', '/var/run/ambari-logsearch-solr',
+      self.assertResourceCalled('Directory', '/var/run/ambari-infra-solr',
                                 owner = 'solr',
                                 group = 'hadoop',
                                 create_parents = True,
                                 cd_access = 'a',
                                 mode = 0755
       )
-      self.assertResourceCalled('Directory', '/opt/logsearch_solr/data',
+      self.assertResourceCalled('Directory', '/opt/ambari_infra_solr/data',
                                 owner = 'solr',
                                 group = 'hadoop',
                                 create_parents = True,
                                 cd_access = 'a',
                                 mode = 0755
       )
-      self.assertResourceCalled('Directory', '/opt/logsearch_solr/data/resources',
+      self.assertResourceCalled('Directory', '/opt/ambari_infra_solr/data/resources',
                                 owner = 'solr',
                                 group = 'hadoop',
                                 create_parents = True,
                                 cd_access = 'a',
                                 mode = 0755
       )
-      self.assertResourceCalled('Directory', '/usr/lib/ambari-logsearch-solr',
+      self.assertResourceCalled('Directory', '/usr/lib/ambari-infra-solr',
                                 owner = 'solr',
                                 group = 'hadoop',
                                 create_parents = True,
@@ -63,7 +63,7 @@
                                 cd_access = 'a',
                                 mode = 0755
                                 )
-      self.assertResourceCalled('Directory', '/etc/ambari-logsearch-solr/conf',
+      self.assertResourceCalled('Directory', '/etc/ambari-infra-solr/conf',
                                 owner = 'solr',
                                 group = 'hadoop',
                                 create_parents = True,
@@ -72,43 +72,36 @@
                                 mode = 0755
                                 )
       
-      self.assertResourceCalled('File', '/var/log/ambari-logsearch-solr/solr-install.log',
+      self.assertResourceCalled('File', '/var/log/ambari-infra-solr/solr-install.log',
                                 owner = 'solr',
                                 group = 'hadoop',
                                 mode = 0644,
                                 content = ''
       )
-      self.assertResourceCalled('File', '/etc/ambari-logsearch-solr/conf/logsearch-solr-env.sh',
+      self.assertResourceCalled('File', '/etc/ambari-infra-solr/conf/infra-solr-env.sh',
                                 owner = 'solr',
                                 group='hadoop',
                                 mode = 0755,
-                                content = InlineTemplate(self.getConfig()['configurations']['logsearch-solr-env']['content'])
+                                content = InlineTemplate(self.getConfig()['configurations']['infra-solr-env']['content'])
       )
-      self.assertResourceCalled('File', '/opt/logsearch_solr/data/solr.xml',
+      self.assertResourceCalled('File', '/opt/ambari_infra_solr/data/solr.xml',
                                 owner = 'solr',
                                 group='hadoop',
-                                content = InlineTemplate(self.getConfig()['configurations']['logsearch-solr-xml']['content'])
+                                content = InlineTemplate(self.getConfig()['configurations']['infra-solr-xml']['content'])
       )
-      self.assertResourceCalled('File', '/etc/ambari-logsearch-solr/conf/log4j.properties',
+      self.assertResourceCalled('File', '/etc/ambari-infra-solr/conf/log4j.properties',
                                 owner = 'solr',
                                 group='hadoop',
-                                content = InlineTemplate(self.getConfig()['configurations']['logsearch-solr-log4j']['content'])
+                                content = InlineTemplate(self.getConfig()['configurations']['infra-solr-log4j']['content'])
       )
-      self.assertResourceCalled('File', '/opt/logsearch_solr/data/zoo.cfg',
-                                owner = 'solr',
-                                group='hadoop',
-                                content = Template('zoo.cfg.j2')
-      )
-      self.assertResourceCalled('Execute', 'export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /logsearch --create-znode --retry 5 --interval 10',
-                                user = "solr")
-      self.assertResourceCalled('Execute', 'export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --cluster-prop --property-name urlScheme --property-value http',
-                                user = "solr")
-      self.assertResourceCalled('Execute', 'export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /logsearch --setup-kerberos-plugin',
-                                user = "solr")
+
+      self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --create-znode --retry 5 --interval 10')
+      self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --cluster-prop --property-name urlScheme --property-value http')
+      self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --setup-kerberos-plugin')
 
   def test_configure_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/logsearch_solr.py",
-                       classname = "LogsearchSolr",
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/infra_solr.py",
+                       classname = "InfraSolr",
                        command = "configure",
                        config_file = "default.json",
                        stack_version = self.STACK_VERSION,
@@ -119,8 +112,8 @@
     self.assertNoMoreResources()
   
   def test_start_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/logsearch_solr.py",
-                       classname = "LogsearchSolr",
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/infra_solr.py",
+                       classname = "InfraSolr",
                        command = "start",
                        config_file = "default.json",
                        stack_version = self.STACK_VERSION,
@@ -128,25 +121,25 @@
     )
     
     self.configureResourcesCalled()
-    self.assertResourceCalled('Execute', "/usr/lib/ambari-logsearch-solr/bin/solr start -cloud -noprompt -s /opt/logsearch_solr/data >> /var/log/ambari-logsearch-solr/solr-install.log 2>&1",
-                              environment = {'SOLR_INCLUDE': '/etc/ambari-logsearch-solr/conf/logsearch-solr-env.sh'},
+    self.assertResourceCalled('Execute', "/usr/lib/ambari-infra-solr/bin/solr start -cloud -noprompt -s /opt/ambari_infra_solr/data >> /var/log/ambari-infra-solr/solr-install.log 2>&1",
+                              environment = {'SOLR_INCLUDE': '/etc/ambari-infra-solr/conf/infra-solr-env.sh'},
                               user = "solr"
     )
   
   def test_stop_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/logsearch_solr.py",
-                       classname = "LogsearchSolr",
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/infra_solr.py",
+                       classname = "InfraSolr",
                        command = "stop",
                        config_file = "default.json",
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
     
-    self.assertResourceCalled('Execute', '/usr/lib/ambari-logsearch-solr/bin/solr stop -all >> /var/log/ambari-logsearch-solr/solr-install.log',
-                              environment = {'SOLR_INCLUDE': '/etc/ambari-logsearch-solr/conf/logsearch-solr-env.sh'},
+    self.assertResourceCalled('Execute', '/usr/lib/ambari-infra-solr/bin/solr stop -all >> /var/log/ambari-infra-solr/solr-install.log',
+                              environment = {'SOLR_INCLUDE': '/etc/ambari-infra-solr/conf/infra-solr-env.sh'},
                               user = "solr",
-                              only_if = "test -f /var/run/ambari-logsearch-solr/solr-8886.pid"
+                              only_if = "test -f /var/run/ambari-infra-solr/solr-8886.pid"
     )
-    self.assertResourceCalled('File', '/var/run/ambari-logsearch-solr/solr-8886.pid',
+    self.assertResourceCalled('File', '/var/run/ambari-infra-solr/solr-8886.pid',
                               action = ['delete']
     )
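
The renamed TestInfraSolr above pins the new install, data, log and pid locations and expects plain start/stop shell lines run as the solr user with SOLR_INCLUDE pointing at infra-solr-env.sh. A sketch that reconstructs those two command strings from the new paths (the helper functions are illustrative, not part of the patch):

INFRA_SOLR_HOME = '/usr/lib/ambari-infra-solr'
INFRA_SOLR_DATADIR = '/opt/ambari_infra_solr/data'
INFRA_SOLR_INSTALL_LOG = '/var/log/ambari-infra-solr/solr-install.log'
INFRA_SOLR_ENV = '/etc/ambari-infra-solr/conf/infra-solr-env.sh'  # passed via SOLR_INCLUDE

def solr_start_cmd():
    # Shape of the Execute asserted in test_start_default above.
    return '%s/bin/solr start -cloud -noprompt -s %s >> %s 2>&1' % (
        INFRA_SOLR_HOME, INFRA_SOLR_DATADIR, INFRA_SOLR_INSTALL_LOG)

def solr_stop_cmd():
    # Shape of the Execute asserted in test_stop_default above.
    return '%s/bin/solr stop -all >> %s' % (INFRA_SOLR_HOME, INFRA_SOLR_INSTALL_LOG)

print(solr_start_cmd())
print(solr_stop_cmd())
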
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
index 15b8ac3..f4dbd63 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
@@ -75,7 +75,7 @@
                               encoding='utf-8'
                               )
 
-    logfeeder_supported_services = ['accumulo', 'ambari', 'ams', 'atlas', 'falcon', 'flume', 'hbase', 'hdfs', 'hive', 'hst', 'kafka',
+    logfeeder_supported_services = ['accumulo', 'ambari', 'ams', 'atlas', 'falcon', 'flume', 'hbase', 'hdfs', 'hive', 'hst', 'infra', 'kafka',
                                     'knox', 'logsearch', 'nifi', 'oozie', 'ranger', 'spark', 'spark2', 'storm', 'yarn', 'zeppelin', 'zookeeper']
 
     logfeeder_config_file_names = ['global.config.json', 'output.config.json'] + ['input.config-%s.json' % (tag) for tag
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
index 65164c7..5a2e6e8 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
@@ -107,46 +107,40 @@
                               content = InlineTemplate(self.getConfig()['configurations']['logsearch-audit_logs-solrconfig']['content'])
                               )
 
-    self.assertResourceCalled('Execute', 'export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /logsearch --check-znode --retry 5 --interval 10',
-                              user = "solr")
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --download-config --config-dir /tmp/solr_config_hadoop_logs_0.[0-9]* --config-set hadoop_logs --retry 30 --interval 5')
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --check-znode --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --download-config --config-dir /tmp/solr_config_hadoop_logs_0.[0-9]* --config-set hadoop_logs --retry 30 --interval 5')
     self.assertResourceCalledRegexp('^File$', '^/tmp/solr_config_hadoop_logs_0.[0-9]*',
                                     content=InlineTemplate(self.getConfig()['configurations']['logsearch-service_logs-solrconfig']['content']),
-                                    owner='solr',
                                     only_if='test -d /tmp/solr_config_hadoop_logs_0.[0-9]*')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --upload-config --config-dir /tmp/solr_config_hadoop_logs_0.[0-9]* --config-set hadoop_logs --retry 30 --interval 5',
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /tmp/solr_config_hadoop_logs_0.[0-9]* --config-set hadoop_logs --retry 30 --interval 5',
                                     only_if='test -d /tmp/solr_config_hadoop_logs_0.[0-9]*')
 
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --upload-config --config-dir /etc/ambari-logsearch-portal/conf/solr_configsets/hadoop_logs/conf --config-set hadoop_logs --retry 30 --interval 5',
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /etc/ambari-logsearch-portal/conf/solr_configsets/hadoop_logs/conf --config-set hadoop_logs --retry 30 --interval 5',
                                     not_if='test -d /tmp/solr_config_hadoop_logs_0.[0-9]*')
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_hadoop_logs_0.[0-9]*',
                                     action=['delete'],
-                                    owner='solr',
                                     create_parents=True)
 
 
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --download-config --config-dir /tmp/solr_config_history_0.[0-9]* --config-set history --retry 30 --interval 5')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --upload-config --config-dir /etc/ambari-logsearch-portal/conf/solr_configsets/history/conf --config-set history --retry 30 --interval 5')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --download-config --config-dir /tmp/solr_config_history_0.[0-9]* --config-set history --retry 30 --interval 5')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /etc/ambari-logsearch-portal/conf/solr_configsets/history/conf --config-set history --retry 30 --interval 5')
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_history_0.[0-9]*',
                                     action=['delete'],
-                                    owner='solr',
                                     create_parents=True)
 
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --download-config --config-dir /tmp/solr_config_audit_logs_0.[0-9]* --config-set audit_logs --retry 30 --interval 5')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --download-config --config-dir /tmp/solr_config_audit_logs_0.[0-9]* --config-set audit_logs --retry 30 --interval 5')
     self.assertResourceCalledRegexp('^File$', '^/tmp/solr_config_audit_logs_0.[0-9]*',
                                     content=InlineTemplate(self.getConfig()['configurations']['logsearch-audit_logs-solrconfig']['content']),
-                                    owner='solr',
                                     only_if='test -d /tmp/solr_config_audit_logs_0.[0-9]*')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --upload-config --config-dir /tmp/solr_config_audit_logs_0.[0-9]* --config-set audit_logs --retry 30 --interval 5',
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /tmp/solr_config_audit_logs_0.[0-9]* --config-set audit_logs --retry 30 --interval 5',
                                     only_if='test -d /tmp/solr_config_audit_logs_0.[0-9]*')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --upload-config --config-dir /etc/ambari-logsearch-portal/conf/solr_configsets/audit_logs/conf --config-set audit_logs --retry 30 --interval 5',
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /etc/ambari-logsearch-portal/conf/solr_configsets/audit_logs/conf --config-set audit_logs --retry 30 --interval 5',
                                     not_if='test -d /tmp/solr_config_audit_logs_0.[0-9]*')
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_audit_logs_0.[0-9]*',
                                     action=['delete'],
-                                    owner='solr',
                                     create_parents=True)
     self.assertResourceCalled('Execute', ('chmod', '-R', 'ugo+r', '/etc/ambari-logsearch-portal/conf/solr_configsets'),
-                              sudo=True
+                              sudo = True
     )
 
   def test_configure_default(self):
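
Each config set in the Log Search expectations above is uploaded either from the rendered /tmp/solr_config_* working copy (only_if it exists) or from the packaged directory under /etc/ambari-logsearch-portal/conf/solr_configsets (not_if it exists). A small sketch of that fallback decision, assuming a hypothetical pick_config_dir helper:

import os

def pick_config_dir(tmp_config_dir, packaged_config_dir):
    # Prefer the rendered working copy when present, otherwise fall back to
    # the config set shipped with the Log Search portal package.
    return tmp_config_dir if os.path.isdir(tmp_config_dir) else packaged_config_dir

# Example for the hadoop_logs config set:
print(pick_config_dir(
    '/tmp/solr_config_hadoop_logs_0.123',
    '/etc/ambari-logsearch-portal/conf/solr_configsets/hadoop_logs/conf'))
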
diff --git a/ambari-server/src/test/python/stacks/2.4/configs/default.json b/ambari-server/src/test/python/stacks/2.4/configs/default.json
index 462ee56..a016ce0 100644
--- a/ambari-server/src/test/python/stacks/2.4/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.4/configs/default.json
@@ -234,27 +234,28 @@
         "logsearch_admin_password" : "admin",
         "content": "admin json content"
       },
-      "logsearch-solr-client-log4j" : {
+      "infra-solr-client-log4j" : {
+        "infra_solr_client_log_dir" : "/var/log/ambari-infra-solr-client",
         "content" : "content"
       },
-      "logsearch-solr-env": {
-        "logsearch_solr_user": "solr",
-        "logsearch_solr_group": "solr",
-        "logsearch_solr_port": "8886",
-        "logsearch_solr_minmem": "512",
-        "logsearch_solr_maxmem": "512",
-        "logsearch_solr_znode": "/logsearch",
-        "logsearch_solr_conf": "/etc/ambari-logsearch-solr",
-        "logsearch_solr_pid_dir": "/var/run/ambari-logsearch-solr",
-        "logsearch_solr_datadir": "/opt/logsearch_solr/data",
-        "logsearch_solr_log_dir": "/var/log/ambari-logsearch-solr",
-        "logsearch_solr_client_log_dir": "/var/log/ambari-logsearch-solr-client",
-        "content": "# By default the script will use JAVA_HOME to determine which java\n# to use, but you can set a specific path for Solr to use without\n# affecting other Java applications on your server/workstation.\nSOLR_JAVA_HOME={{java64_home}}\n\n# Increase Java Min/Max Heap as needed to support your indexing / query needs\nSOLR_JAVA_MEM=\"-Xms{{solr_min_mem}} -Xmx{{solr_max_mem}}\"\n\n# Enable verbose GC logging\nGC_LOG_OPTS=\"-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \\\n-XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime\"\n\n# These GC settings have shown to work well for a number of common Solr workloads\nGC_TUNE=\"-XX:NewRatio=3 \\\n-XX:SurvivorRatio=4 \\\n-XX:TargetSurvivorRatio=90 \\\n-XX:MaxTenuringThreshold=8 \\\n-XX:+UseConcMarkSweepGC \\\n-XX:+UseParNewGC \\\n-XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 \\\n-XX:+CMSScavengeBeforeRemark \\\n-XX:PretenureSizeThreshold=64m \\\n-XX:+UseCMSInitiatingOccupancyOnly \\\n-XX:CMSInitiatingOccupancyFraction=50 \\\n-XX:CMSMaxAbortablePrecleanTime=6000 \\\n-XX:+CMSParallelRemarkEnabled \\\n-XX:+ParallelRefProcEnabled\"\n\n# Set the ZooKeeper connection string if using an external ZooKeeper ensemble\n# e.g. host1:2181,host2:2181/chroot\n# Leave empty if not using SolrCloud\nZK_HOST=\"{{zookeeper_quorum}}{{solr_znode}}\"\n\n# Set the ZooKeeper client timeout (for SolrCloud mode)\nZK_CLIENT_TIMEOUT=\"60000\"\n\n# By default the start script uses \"localhost\"; override the hostname here\n# for production SolrCloud environments to control the hostname exposed to cluster state\n#SOLR_HOST=\"192.168.1.1\"\n\n# By default the start script uses UTC; override the timezone if needed\n#SOLR_TIMEZONE=\"UTC\"\n\n# Set to true to activate the JMX RMI connector to allow remote JMX client applications\n# to monitor the JVM hosting Solr; set to \"false\" to disable that behavior\n# (false is recommended in production environments)\nENABLE_REMOTE_JMX_OPTS=\"false\"\n\n# The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here\n# RMI_PORT=18983\n\n# Anything you add to the SOLR_OPTS variable will be included in the java\n# start command line as-is, in ADDITION to other options. If you specify the\n# -a option on start script, those options will be appended as well. 
Examples:\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.autoSoftCommit.maxTime=3000\"\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.autoCommit.maxTime=60000\"\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.clustering.enabled=true\"\n\n# Location where the bin/solr script will save PID files for running instances\n# If not set, the script will create PID files in $SOLR_TIP/bin\nSOLR_PID_DIR={{solr_piddir}}\n\n# Path to a directory where Solr creates index files, the specified directory\n# must contain a solr.xml; by default, Solr will use server/solr\nSOLR_HOME={{logsearch_solr_datadir}}\n\n# Solr provides a default Log4J configuration properties file in server/resources\n# however, you may want to customize the log settings and file appender location\n# so you can point the script to use a different log4j.properties file\nLOG4J_PROPS={{logsearch_solr_datadir}}/resources/log4j.properties\n\n# Location where Solr should write logs to; should agree with the file appender\n# settings in server/resources/log4j.properties\nSOLR_LOGS_DIR={{solr_log_dir}}\n\n# Sets the port Solr binds to, default is 8983\nSOLR_PORT={{solr_port}}\n\n# Be sure to update the paths to the correct keystore for your environment\n{% if logsearch_solr_ssl_enabled %}\nSOLR_SSL_KEY_STORE={{logsearch_solr_keystore_location}}\nSOLR_SSL_KEY_STORE_PASSWORD={{logsearch_solr_keystore_password}}\nSOLR_SSL_TRUST_STORE={{logsearch_solr_keystore_location}}\nSOLR_SSL_TRUST_STORE_PASSWORD={{logsearch_solr_keystore_password}}\nSOLR_SSL_NEED_CLIENT_AUTH=false\nSOLR_SSL_WANT_CLIENT_AUTH=false\n{% endif %}\n\n# Uncomment to set a specific SSL port (-Djetty.ssl.port=N); if not set\n# and you are using SSL, then the start script will use SOLR_PORT for the SSL port\n#SOLR_SSL_PORT="
+      "infra-solr-env": {
+        "infra_solr_user": "solr",
+        "infra_solr_group": "solr",
+        "infra_solr_port": "8886",
+        "infra_solr_minmem": "512",
+        "infra_solr_maxmem": "512",
+        "infra_solr_znode": "/infra-solr",
+        "infra_solr_conf": "/etc/ambari-infra-solr",
+        "infra_solr_pid_dir": "/var/run/ambari-infra-solr",
+        "infra_solr_datadir": "/opt/ambari_infra_solr/data",
+        "infra_solr_log_dir": "/var/log/ambari-infra-solr",
+        "infra_solr_client_log_dir": "/var/log/ambari-infra-solr-client",
+        "content": "# By default the script will use JAVA_HOME to determine which java\n# to use, but you can set a specific path for Solr to use without\n# affecting other Java applications on your server/workstation.\nSOLR_JAVA_HOME={{java64_home}}\n\n# Increase Java Min/Max Heap as needed to support your indexing / query needs\nSOLR_JAVA_MEM=\"-Xms{{solr_min_mem}} -Xmx{{solr_max_mem}}\"\n\n# Enable verbose GC logging\nGC_LOG_OPTS=\"-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \\\n-XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime\"\n\n# These GC settings have shown to work well for a number of common Solr workloads\nGC_TUNE=\"-XX:NewRatio=3 \\\n-XX:SurvivorRatio=4 \\\n-XX:TargetSurvivorRatio=90 \\\n-XX:MaxTenuringThreshold=8 \\\n-XX:+UseConcMarkSweepGC \\\n-XX:+UseParNewGC \\\n-XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 \\\n-XX:+CMSScavengeBeforeRemark \\\n-XX:PretenureSizeThreshold=64m \\\n-XX:+UseCMSInitiatingOccupancyOnly \\\n-XX:CMSInitiatingOccupancyFraction=50 \\\n-XX:CMSMaxAbortablePrecleanTime=6000 \\\n-XX:+CMSParallelRemarkEnabled \\\n-XX:+ParallelRefProcEnabled\"\n\n# Set the ZooKeeper connection string if using an external ZooKeeper ensemble\n# e.g. host1:2181,host2:2181/chroot\n# Leave empty if not using SolrCloud\nZK_HOST=\"{{zookeeper_quorum}}{{solr_znode}}\"\n\n# Set the ZooKeeper client timeout (for SolrCloud mode)\nZK_CLIENT_TIMEOUT=\"60000\"\n\n# By default the start script uses \"localhost\"; override the hostname here\n# for production SolrCloud environments to control the hostname exposed to cluster state\n#SOLR_HOST=\"192.168.1.1\"\n\n# By default the start script uses UTC; override the timezone if needed\n#SOLR_TIMEZONE=\"UTC\"\n\n# Set to true to activate the JMX RMI connector to allow remote JMX client applications\n# to monitor the JVM hosting Solr; set to \"false\" to disable that behavior\n# (false is recommended in production environments)\nENABLE_REMOTE_JMX_OPTS=\"false\"\n\n# The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here\n# RMI_PORT=18983\n\n# Anything you add to the SOLR_OPTS variable will be included in the java\n# start command line as-is, in ADDITION to other options. If you specify the\n# -a option on start script, those options will be appended as well. 
Examples:\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.autoSoftCommit.maxTime=3000\"\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.autoCommit.maxTime=60000\"\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.clustering.enabled=true\"\n\n# Location where the bin/solr script will save PID files for running instances\n# If not set, the script will create PID files in $SOLR_TIP/bin\nSOLR_PID_DIR={{solr_piddir}}\n\n# Path to a directory where Solr creates index files, the specified directory\n# must contain a solr.xml; by default, Solr will use server/solr\nSOLR_HOME={{infra_solr_datadir}}\n\n# Solr provides a default Log4J configuration properties file in server/resources\n# however, you may want to customize the log settings and file appender location\n# so you can point the script to use a different log4j.properties file\nLOG4J_PROPS={{logsearch_solr_datadir}}/resources/log4j.properties\n\n# Location where Solr should write logs to; should agree with the file appender\n# settings in server/resources/log4j.properties\nSOLR_LOGS_DIR={{solr_log_dir}}\n\n# Sets the port Solr binds to, default is 8983\nSOLR_PORT={{solr_port}}\n\n# Be sure to update the paths to the correct keystore for your environment\n{% if logsearch_solr_ssl_enabled %}\nSOLR_SSL_KEY_STORE={{logsearch_solr_keystore_location}}\nSOLR_SSL_KEY_STORE_PASSWORD={{logsearch_solr_keystore_password}}\nSOLR_SSL_TRUST_STORE={{logsearch_solr_keystore_location}}\nSOLR_SSL_TRUST_STORE_PASSWORD={{logsearch_solr_keystore_password}}\nSOLR_SSL_NEED_CLIENT_AUTH=false\nSOLR_SSL_WANT_CLIENT_AUTH=false\n{% endif %}\n\n# Uncomment to set a specific SSL port (-Djetty.ssl.port=N); if not set\n# and you are using SSL, then the start script will use SOLR_PORT for the SSL port\n#SOLR_SSL_PORT="
       },
-      "logsearch-solr-xml": {
+      "infra-solr-xml": {
         "content": "\n&lt;solr&gt;\n  &lt;solrcloud&gt;\n    &lt;str name=\"host\"&gt;${host:}&lt;/str&gt;\n    &lt;int name=\"hostPort\"&gt;${jetty.port:}&lt;/int&gt;\n    &lt;str name=\"hostContext\"&gt;${hostContext:solr}&lt;/str&gt;\n    &lt;int name=\"zkClientTimeout\"&gt;${zkClientTimeout:15000}&lt;/int&gt;\n    &lt;bool name=\"genericCoreNodeNames\"&gt;${genericCoreNodeNames:true}&lt;/bool&gt;\n  &lt;/solrcloud&gt;\n&lt;/solr&gt;"
       },
-      "logsearch-solr-log4j": {
+      "infra-solr-log4j": {
         "content": "\n#  Logging level\nsolr.log={{solr_log_dir}}\n#log4j.rootLogger=INFO, file, CONSOLE\nlog4j.rootLogger=INFO, file\n\nlog4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender\n\nlog4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout\nlog4j.appender.CONSOLE.layout.ConversionPattern=%-4r [%t] %-5p %c %x [%X{collection} %X{shard} %X{replica} %X{core}] \\u2013 %m%n\n\n#- size rotation with log cleanup.\nlog4j.appender.file=org.apache.log4j.RollingFileAppender\nlog4j.appender.file.MaxFileSize=10MB\nlog4j.appender.file.MaxBackupIndex=9\n\n#- File to log to and log format\nlog4j.appender.file.File=${solr.log}/solr.log\nlog4j.appender.file.layout=org.apache.log4j.PatternLayout\nlog4j.appender.file.layout.ConversionPattern=%d{ISO8601} [%t] %-5p [%X{collection} %X{shard} %X{replica} %X{core}] %C (%F:%L) - %m%n\n\nlog4j.logger.org.apache.zookeeper=WARN\nlog4j.logger.org.apache.hadoop=WARN\n\n# set to INFO to enable infostream log messages\nlog4j.logger.org.apache.solr.update.LoggingInfoStream=OFF"
       },
       "logsearch-env": {
@@ -267,7 +268,6 @@
         "logsearch_ui_protocol": "http",
         "logsearch_ui_port" : "61888",
         "logsearch_solr_audit_logs_use_ranger": "false",
-        "logsearch_solr_jmx_port": "18886",
         "content": "# Licensed to the Apache Software Foundation (ASF) under one or more\n# contributor license agreements.  See the NOTICE file distributed with\n# this work for additional information regarding copyright ownership.\n# The ASF licenses this file to You under the Apache License, Version 2.0\n# (the \"License\"); you may not use this file except in compliance with\n# the License.  You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#solr.url=http://{{solr_host}}:{{solr_port}}/solr\n\n#Service Logs and History colletion\nlogsearch.solr.zkhosts={{zookeeper_quorum}}{{solr_znode}}\nlogsearch.solr.collection.service.logs={{logsearch_collection_service_logs}}\nlogsearch.solr.collection.history=history\n\nlogsearch.service.logs.split.interval.mins={{logsearch_service_logs_split_interval_mins}}\nlogsearch.collection.service.logs.numshards={{logsearch_collection_service_logs_numshards}}\nlogsearch.collection.service.logs.replication.factor={{logsearch_collection_service_logs_replication_factor}}\n\nlogsearch.service.logs.fields={{logsearch_service_logs_fields}}\n\n#Audit logs\nlogsearch.solr.audit.logs.zkhosts={{logsearch_solr_audit_logs_zk_quorum}}{{logsearch_solr_audit_logs_zk_node}}\nogsearch.solr.collection.audit.logs={{solr_collection_audit_logs}}\nlogsearch.solr.audit.logs.url={{logsearch_solr_audit_logs_url}}\n\nlogsearch.audit.logs.split.interval.mins={{logsearch_audit_logs_split_interval_mins}}\nlogsearch.collection.audit.logs.numshards={{logsearch_collection_audit_logs_numshards}}\nlogsearch.collection.audit.logs.replication.factor={{logsearch_collection_audit_logs_replication_factor}}\n{% if logsearch_solr_ssl_enabled %}\nexport LOGSEARCH_SSL=\"true\"\nexport LOGSEARCH_KEYSTORE_LOCATION={{logsearch_keystore_location}}\nexport LOGSEARCH_KEYSTORE_PASSWORD={{logsearch_keystore_password}}\nexport LOGSEARCH_KEYSTORE_TYPE={{logsearch_keystore_type}}\nexport LOGSEARCH_TRUSTSTORE_LOCATION={{logsearch_truststore_location}}\nexport LOGSEARCH_TRUSTSTORE_PASSWORD={{logsearch_truststore_password}}\nexport LOGSEARCH_TRUSTSTORE_TYPE={{logsearch_truststore_type}}\n{% endif %}"
       },
       "logsearch-service_logs-solrconfig": {
@@ -419,7 +419,7 @@
       "zookeeper_hosts": [
          "c6401.ambari.apache.org"
       ],
-      "logsearch_solr_hosts": [
+      "infra_solr_hosts": [
         "c6401.ambari.apache.org"
       ],
       "logsearch_server_hosts" : [
diff --git a/ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py b/ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py
index b155c37..7fdf3e6 100644
--- a/ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py
+++ b/ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py
@@ -124,58 +124,46 @@
                               group=u'hadoop',
                               mode=0644,
                               )
-    self.assertResourceCalled('Directory', '/var/log/ambari-logsearch-solr-client',
-                              owner='solr',
-                              group='hadoop',
+    self.assertResourceCalled('Directory', '/var/log/ambari-infra-solr-client',
                               create_parents=True,
                               cd_access='a',
                               mode=0755
     )
-    self.assertResourceCalled('Directory', '/usr/lib/ambari-logsearch-solr-client',
-                              owner='solr',
-                              group='hadoop',
+    self.assertResourceCalled('Directory', '/usr/lib/ambari-infra-solr-client',
                               create_parents = True,
                               recursive_ownership = True,
                               cd_access='a',
                               mode=0755
     )
-    self.assertResourceCalled('File', '/usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh',
-                              content=StaticFile('/usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh'),
-                              owner='solr',
-                              group='hadoop',
-                              mode=0755,
+    self.assertResourceCalled('File', '/usr/lib/ambari-infra-solr-client/solrCloudCli.sh',
+                              content=StaticFile('/usr/lib/ambari-infra-solr-client/solrCloudCli.sh'),
+                              mode=0755
     )
-    self.assertResourceCalled('File', '/usr/lib/ambari-logsearch-solr-client/log4j.properties',
+    self.assertResourceCalled('File', '/usr/lib/ambari-infra-solr-client/log4j.properties',
                               content=InlineTemplate(self.getConfig()['configurations'][
-                                                       'logsearch-solr-client-log4j']['content']),
-                              owner='solr',
-                              group='hadoop',
+                                                       'infra-solr-client-log4j']['content']),
                               mode=0644,
     )
-    self.assertResourceCalled('File', '/var/log/ambari-logsearch-solr-client/solr-client.log',
-                              owner='solr',
-                              group='hadoop',
+    self.assertResourceCalled('File', '/var/log/ambari-infra-solr-client/solr-client.log',
                               mode=0664,
                               content = ''
     )
 
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /logsearch --check-znode --retry 5 --interval 10')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --download-config --config-dir /tmp/solr_config_basic_configs_0.[0-9]* --config-set basic_configs --retry 30 --interval 5')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --check-znode --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --download-config --config-dir /tmp/solr_config_basic_configs_0.[0-9]* --config-set basic_configs --retry 30 --interval 5')
     self.assertResourceCalledRegexp('^File$', '^/tmp/solr_config_basic_configs_0.[0-9]*',
                                     content=InlineTemplate(self.getConfig()['configurations']['atlas-solrconfig']['content']),
-                                    owner='atlas',
                                     only_if='test -d /tmp/solr_config_basic_configs_0.[0-9]*')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --upload-config --config-dir /tmp/solr_config_basic_configs_0.[0-9]* --config-set basic_configs --retry 30 --interval 5',
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /tmp/solr_config_basic_configs_0.[0-9]* --config-set basic_configs --retry 30 --interval 5',
                                     only_if='test -d /tmp/solr_config_basic_configs_0.[0-9]*')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --upload-config --config-dir /etc/atlas/conf/solr --config-set basic_configs --retry 30 --interval 5',
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /etc/atlas/conf/solr --config-set basic_configs --retry 30 --interval 5',
                                     not_if='test -d /tmp/solr_config_basic_configs_0.[0-9]*')
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_basic_configs_0.[0-9]*',
                                     action=['delete'],
-                                    owner='atlas',
                                     create_parents=True)
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --create-collection --collection vertex_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --create-collection --collection edge_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/logsearch --create-collection --collection fulltext_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection vertex_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection edge_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection fulltext_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
 
 
   def test_configure_default(self):
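
Aside (not part of the patch): the regexp assertions above can be sanity-checked outside the test harness. The snippet below is a minimal, illustrative check that the asserted pattern for the renamed client matches a representative command string; the sample command simply restates the values the assertions expect (the ambari-sudo.sh prefix, the /usr/lib/ambari-infra-solr-client path, the /infra-solr znode).

# Illustrative only: verify that the asserted regex matches a command of the
# expected shape. Both strings restate values from the assertions above.
import re

pattern = (r'^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 '
           r'/usr/lib/ambari-infra-solr-client/solrCloudCli.sh '
           r'--zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr '
           r'--create-collection --collection vertex_index --config-set basic_configs '
           r'--shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')

sample = ('ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 '
          '/usr/lib/ambari-infra-solr-client/solrCloudCli.sh '
          '--zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr '
          '--create-collection --collection vertex_index --config-set basic_configs '
          '--shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')

assert re.match(pattern, sample) is not None
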
diff --git a/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py b/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
index e39e1cd..10db1c8 100644
--- a/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
+++ b/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
@@ -51,47 +51,36 @@
     )
     self.assert_configure_default()
 
-    self.assertResourceCalled('Directory', '/var/log/ambari-logsearch-solr-client',
-        owner = 'logsearch-solr',
-        group = 'hadoop',
+    self.assertResourceCalled('Directory', '/var/log/ambari-infra-solr-client',
         create_parents = True,
         mode = 0755,
         cd_access = 'a',
     )
-    self.assertResourceCalled('Directory', '/usr/lib/ambari-logsearch-solr-client',
-        group = 'hadoop',
+    self.assertResourceCalled('Directory', '/usr/lib/ambari-infra-solr-client',
         cd_access = 'a',
         create_parents = True,
         mode = 0755,
-        owner = 'logsearch-solr',
         recursive_ownership = True,
     )
-    self.assertResourceCalled('File', '/usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh',
-        content = StaticFile('/usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh'),
-        owner = 'logsearch-solr',
-        group = 'hadoop',
+    self.assertResourceCalled('File', '/usr/lib/ambari-infra-solr-client/solrCloudCli.sh',
+        content = StaticFile('/usr/lib/ambari-infra-solr-client/solrCloudCli.sh'),
         mode = 0755,
     )
-    self.assertResourceCalled('File', '/usr/lib/ambari-logsearch-solr-client/log4j.properties',
-        owner = 'logsearch-solr',
-        content = InlineTemplate(self.getConfig()['configurations']['logsearch-solr-client-log4j']['content']),
-        group = 'hadoop',
+    self.assertResourceCalled('File', '/usr/lib/ambari-infra-solr-client/log4j.properties',
+        content = InlineTemplate(self.getConfig()['configurations']['infra-solr-client-log4j']['content']),
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/var/log/ambari-logsearch-solr-client/solr-client.log',
+    self.assertResourceCalled('File', '/var/log/ambari-infra-solr-client/solr-client.log',
         content = '',
-        owner = 'logsearch-solr',
-        group = 'hadoop',
         mode = 0664,
     )
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /ambari-solr --check-znode --retry 5 --interval 10')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/ambari-solr --download-config --config-dir /tmp/solr_config_ranger_audits_0.[0-9]* --config-set ranger_audits --retry 30 --interval 5')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/ambari-solr --upload-config --config-dir /usr/hdp/current/ranger-admin/contrib/solr_for_audit_setup/conf --config-set ranger_audits --retry 30 --interval 5')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --check-znode --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --download-config --config-dir /tmp/solr_config_ranger_audits_0.[0-9]* --config-set ranger_audits --retry 30 --interval 5')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /usr/hdp/current/ranger-admin/contrib/solr_for_audit_setup/conf --config-set ranger_audits --retry 30 --interval 5')
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_ranger_audits_0.[0-9]*',
                                     action=['delete'],
-                                    owner='ranger',
                                     create_parents=True)
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/ambari-solr --create-collection --collection ranger_audits --config-set ranger_audits --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection ranger_audits --config-set ranger_audits --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
 
     self.assertResourceCalled('Execute', '/usr/bin/ranger-admin-start',
       environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
@@ -143,51 +132,40 @@
     )
     self.assert_configure_secured()
 
-    self.assertResourceCalled('Directory', '/var/log/ambari-logsearch-solr-client',
-        owner = 'logsearch-solr',
-        group = 'hadoop',
+    self.assertResourceCalled('Directory', '/var/log/ambari-infra-solr-client',
         create_parents = True,
         mode = 0755,
         cd_access = 'a',
     )
-    self.assertResourceCalled('Directory', '/usr/lib/ambari-logsearch-solr-client',
-        group = 'hadoop',
+    self.assertResourceCalled('Directory', '/usr/lib/ambari-infra-solr-client',
         cd_access = 'a',
         create_parents = True,
         mode = 0755,
-        owner = 'logsearch-solr',
         recursive_ownership = True,
     )
-    self.assertResourceCalled('File', '/usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh',
-        content = StaticFile('/usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh'),
-        owner = 'logsearch-solr',
-        group = 'hadoop',
+    self.assertResourceCalled('File', '/usr/lib/ambari-infra-solr-client/solrCloudCli.sh',
+        content = StaticFile('/usr/lib/ambari-infra-solr-client/solrCloudCli.sh'),
         mode = 0755,
     )
-    self.assertResourceCalled('File', '/usr/lib/ambari-logsearch-solr-client/log4j.properties',
-        owner = 'logsearch-solr',
-        content = InlineTemplate(self.getConfig()['configurations']['logsearch-solr-client-log4j']['content']),
-        group = 'hadoop',
+    self.assertResourceCalled('File', '/usr/lib/ambari-infra-solr-client/log4j.properties',
+        content = InlineTemplate(self.getConfig()['configurations']['infra-solr-client-log4j']['content']),
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/var/log/ambari-logsearch-solr-client/solr-client.log',
+    self.assertResourceCalled('File', '/var/log/ambari-infra-solr-client/solr-client.log',
         content = '',
-        owner = 'logsearch-solr',
-        group = 'hadoop',
         mode = 0664,
     )
     self.assertResourceCalled('File', '/usr/hdp/current/ranger-admin/conf/ranger_solr_jaas.conf',
       content = Template('ranger_solr_jaas_conf.j2'),
       owner = 'ranger',
     )
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /ambari-solr --check-znode --retry 5 --interval 10')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/ambari-solr --download-config --config-dir /tmp/solr_config_ranger_audits_0.[0-9]* --config-set ranger_audits --retry 30 --interval 5')
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/ambari-solr --upload-config --config-dir /usr/hdp/current/ranger-admin/contrib/solr_for_audit_setup/conf --config-set ranger_audits --retry 30 --interval 5')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /ambari-solr --check-znode --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/ambari-solr --download-config --config-dir /tmp/solr_config_ranger_audits_0.[0-9]* --config-set ranger_audits --retry 30 --interval 5')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/ambari-solr --upload-config --config-dir /usr/hdp/current/ranger-admin/contrib/solr_for_audit_setup/conf --config-set ranger_audits --retry 30 --interval 5')
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_ranger_audits_0.[0-9]*',
                                     action=['delete'],
-                                    owner='ranger',
                                     create_parents=True)
-    self.assertResourceCalledRegexp('^Execute$', '^export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/ambari-solr --create-collection --collection ranger_audits --config-set ranger_audits --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/ambari-solr --create-collection --collection ranger_audits --config-set ranger_audits --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
 
     self.assertResourceCalled('Execute', '/usr/bin/ranger-admin-start',
       environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
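
Aside (illustrative only): the updated assertions read the client log4j content from the 'infra-solr-client-log4j' configuration section instead of 'logsearch-solr-client-log4j'. The sketch below shows how such a lookup resolves against a config dict shaped like the fixture JSON later in this patch; the values are placeholders, not Ambari defaults.

# Minimal sketch (not Ambari code): resolving the renamed config sections.
# The dict shape mirrors the fixture JSON further down in this patch.
config = {
    "configurations": {
        "infra-solr-env": {
            "infra_solr_client_log_dir": "/var/log/ambari-infra-solr-client",
        },
        "infra-solr-client-log4j": {
            "content": "log4j.rootLogger=INFO,file,stdout,stderr",  # placeholder
        },
    }
}

# The assertions above expect log4j.properties to be rendered from the
# 'infra-solr-client-log4j' section and written under the infra client log dir.
log4j_content = config["configurations"]["infra-solr-client-log4j"]["content"]
log_dir = config["configurations"]["infra-solr-env"]["infra_solr_client_log_dir"]
print(log_dir, len(log4j_content))
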
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index 2d4af2e..d1f6d88 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -6540,9 +6540,9 @@
           "atlas.kafka.zookeeper.connect": ""
         }
       },
-      "logsearch-solr-env": {
+      "infra-solr-env": {
         "properties": {
-          "logsearch_solr_znode": "/logsearch"
+          "infra_solr_znode": "/infra-solr"
         }
       },
       'ranger-atlas-plugin-properties': {
@@ -6562,7 +6562,7 @@
     expected = {
       'application-properties': {
         'properties': {
-          'atlas.graph.index.search.solr.zookeeper-url': 'c6401.ambari.apache.org:2181/logsearch',
+          'atlas.graph.index.search.solr.zookeeper-url': 'c6401.ambari.apache.org:2181/infra-solr',
           "atlas.audit.hbase.zookeeper.quorum": "c6401.ambari.apache.org",
           "atlas.graph.storage.hostname": "c6401.ambari.apache.org",
           "atlas.kafka.bootstrap.servers": "c6401.ambari.apache.org:6667",
@@ -6571,9 +6571,9 @@
           "atlas.rest.address": "http://c6401.ambari.apache.org:21000"
         }
       },
-      "logsearch-solr-env": {
+      "infra-solr-env": {
         "properties": {
-          "logsearch_solr_znode": "/logsearch"
+          "infra_solr_znode": "/infra-solr"
         }
       },
       'ranger-atlas-plugin-properties': {
@@ -6595,9 +6595,9 @@
       },
       "services": [
         {
-          "href": "/api/v1/stacks/HDP/versions/2.2/services/LOGSEARCH",
+          "href": "/api/v1/stacks/HDP/versions/2.2/services/AMBARI_INFRA",
           "StackServices": {
-            "service_name": "LOGSEARCH",
+            "service_name": "AMBARI_INFRA",
             "service_version": "2.6.0.2.2",
             "stack_name": "HDP",
             "stack_version": "2.3"
@@ -6608,7 +6608,7 @@
                 "advertise_version": "false",
                 "cardinality": "1",
                 "component_category": "MASTER",
-                "component_name": "LOGSEARCH_SOLR",
+                "component_name": "INFRA_SOLR",
                 "display_name": "solr",
                 "is_client": "false",
                 "is_master": "true",
@@ -6725,9 +6725,9 @@
             "atlas.kafka.zookeeper.connect": ""
           }
         },
-        "logsearch-solr-env": {
+        "infra-solr-env": {
           "properties": {
-            "logsearch_solr_znode": "/logsearch"
+            "infra_solr_znode": "/infra-solr"
           }
         },
         "hbase-site": {
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/default.json b/ambari-server/src/test/python/stacks/2.5/configs/default.json
index 407e4b9..9df7820 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/default.json
@@ -228,12 +228,13 @@
       "livy_log_dir": "/var/log/livy",
       "livy_user": "livy"
     },
-    "logsearch-solr-env": {
-      "logsearch_solr_znode": "/logsearch",
-      "logsearch_solr_user": "solr",
-      "logsearch_solr_client_log_dir" :"/var/log/ambari-logsearch-solr-client"
+    "infra-solr-env": {
+      "infra_solr_znode": "/infra-solr",
+      "infra_solr_user": "solr",
+      "infra_solr_client_log_dir" :"/var/log/ambari-infra-solr-client"
     },
-    "logsearch-solr-client-log4j" : {
+    "infra-solr-client-log4j" : {
+      "infra_solr_client_log_dir" : "/var/log/ambari-infra-solr-client",
       "content" : "content"
     },
     "application-properties": {
@@ -432,7 +433,7 @@
     "kafka_broker_hosts": [
       "c6401.ambari.apache.org"
     ],
-    "logsearch_solr_hosts": [
+    "infra_solr_hosts": [
       "c6401.ambari.apache.org"
     ],
     "zookeeper_hosts": [
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-default.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-default.json
index 9836e73..4510f4b 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-default.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-default.json
@@ -51,8 +51,8 @@
         "ranger-hdfs-security": {}, 
         "usersync-properties": {}, 
         "zookeeper-env": {},
-        "logsearch-solr-env": {},
-        "logsearch-solr-client-log4j": {},
+        "infra-solr-env": {},
+        "infra-solr-client-log4j": {},
         "cluster-env": {}
     }, 
     "public_hostname": "c6401.ambari.apache.org", 
@@ -148,10 +148,10 @@
         "zookeeper-env": {
             "tag": "version1"
         },
-        "logsearch-solr-env": {
+        "infra-solr-env": {
             "tag": "version1467098537360"
         },
-        "logsearch-solr-client-log4j": {
+        "infra-solr-client-log4j": {
             "tag": "version1467096917836"
         },
         "cluster-env": {
@@ -246,7 +246,7 @@
         "zookeeper_hosts": [
             "c6401.ambari.apache.org"
         ],
-        "logsearch_solr_hosts": [
+        "infra_solr_hosts": [
             "c6401.ambari.apache.org"
         ]
     }, 
@@ -645,32 +645,33 @@
             "zk_pid_dir": "/var/run/zookeeper", 
             "zk_user": "zookeeper"
         },
-        "logsearch-solr-env": {
-            "logsearch_solr_datadir": "/opt/logsearch_solr/data", 
-            "logsearch_solr_keystore_location": "/etc/security/serverKeys/logsearch.keyStore.jks", 
-            "logsearch_solr_kerberos_name_rules": "DEFAULT", 
-            "logsearch_solr_user": "logsearch-solr", 
-            "logsearch_solr_maxmem": "1024", 
+        "infra-solr-env": {
+            "infra_solr_datadir": "/opt/logsearch_solr/data",
+            "infra_solr_keystore_location": "/etc/security/serverKeys/logsearch.keyStore.jks",
+            "infra_solr_kerberos_name_rules": "DEFAULT",
+            "infra_solr_user": "infra-solr",
+            "infra_solr_maxmem": "1024",
             "content": "#!/bin/bash\n# Licensed to the Apache Software Foundation (ASF) under one or more\n# contributor license agreements. See the NOTICE file distributed with\n# this work for additional information regarding copyright ownership.\n# The ASF licenses this file to You under the Apache License, Version 2.0\n# (the \"License\"); you may not use this file except in compliance with\n# the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# By default the script will use JAVA_HOME to determine which java\n# to use, but you can set a specific path for Solr to use without\n# affecting other Java applications on your server/workstation.\nSOLR_JAVA_HOME={{java64_home}}\n\n# Increase Java Min/Max Heap as needed to support your indexing / query needs\nSOLR_JAVA_MEM=\"-Xms{{logsearch_solr_min_mem}}m -Xmx{{logsearch_solr_max_mem}}m\"\n\n# Enable verbose GC logging\nGC_LOG_OPTS=\"-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \\\n-XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime\"\n\n# These GC settings have shown to work well for a number of common Solr workloads\nGC_TUNE=\"-XX:NewRatio=3 \\\n-XX:SurvivorRatio=4 \\\n-XX:TargetSurvivorRatio=90 \\\n-XX:MaxTenuringThreshold=8 \\\n-XX:+UseConcMarkSweepGC \\\n-XX:+UseParNewGC \\\n-XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 \\\n-XX:+CMSScavengeBeforeRemark \\\n-XX:PretenureSizeThreshold=64m \\\n-XX:+UseCMSInitiatingOccupancyOnly \\\n-XX:CMSInitiatingOccupancyFraction=50 \\\n-XX:CMSMaxAbortablePrecleanTime=6000 \\\n-XX:+CMSParallelRemarkEnabled \\\n-XX:+ParallelRefProcEnabled\"\n\n# Set the ZooKeeper connection string if using an external ZooKeeper ensemble\n# e.g. host1:2181,host2:2181/chroot\n# Leave empty if not using SolrCloud\nZK_HOST=\"{{zookeeper_quorum}}{{logsearch_solr_znode}}\"\n\n# Set the ZooKeeper client timeout (for SolrCloud mode)\nZK_CLIENT_TIMEOUT=\"60000\"\n\n# By default the start script uses \"localhost\"; override the hostname here\n# for production SolrCloud environments to control the hostname exposed to cluster state\n#SOLR_HOST=\"192.168.1.1\"\n\n# By default the start script uses UTC; override the timezone if needed\n#SOLR_TIMEZONE=\"UTC\"\n\n# Set to true to activate the JMX RMI connector to allow remote JMX client applications\n# to monitor the JVM hosting Solr; set to \"false\" to disable that behavior\n# (false is recommended in production environments)\nENABLE_REMOTE_JMX_OPTS=\"true\"\n\n# The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here\nRMI_PORT={{logsearch_solr_jmx_port}}\n\n# Anything you add to the SOLR_OPTS variable will be included in the java\n# start command line as-is, in ADDITION to other options. If you specify the\n# -a option on start script, those options will be appended as well. 
Examples:\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.autoSoftCommit.maxTime=3000\"\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.autoCommit.maxTime=60000\"\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.clustering.enabled=true\"\n\n# Location where the bin/solr script will save PID files for running instances\n# If not set, the script will create PID files in $SOLR_TIP/bin\nSOLR_PID_DIR={{logsearch_solr_piddir}}\n\n# Path to a directory where Solr creates index files, the specified directory\n# must contain a solr.xml; by default, Solr will use server/solr\nSOLR_HOME={{logsearch_solr_datadir}}\n\n# Solr provides a default Log4J configuration properties file in server/resources\n# however, you may want to customize the log settings and file appender location\n# so you can point the script to use a different log4j.properties file\nLOG4J_PROPS={{logsearch_solr_conf}}/log4j.properties\n\n# Location where Solr should write logs to; should agree with the file appender\n# settings in server/resources/log4j.properties\nSOLR_LOGS_DIR={{logsearch_solr_log_dir}}\n\n# Sets the port Solr binds to, default is 8983\nSOLR_PORT={{logsearch_solr_port}}\n\n# Be sure to update the paths to the correct keystore for your environment\n{% if logsearch_solr_ssl_enabled %}\nSOLR_SSL_KEY_STORE={{logsearch_solr_keystore_location}}\nSOLR_SSL_KEY_STORE_PASSWORD={{logsearch_solr_keystore_password}}\nSOLR_SSL_TRUST_STORE={{logsearch_solr_keystore_location}}\nSOLR_SSL_TRUST_STORE_PASSWORD={{logsearch_solr_keystore_password}}\nSOLR_SSL_NEED_CLIENT_AUTH=false\nSOLR_SSL_WANT_CLIENT_AUTH=false\n{% endif %}\n\n# Uncomment to set a specific SSL port (-Djetty.ssl.port=N); if not set\n# and you are using SSL, then the start script will use SOLR_PORT for the SSL port\n#SOLR_SSL_PORT=\n\n{% if security_enabled -%}\nSOLR_HOST=`hostname -f`\nSOLR_JAAS_FILE={{logsearch_solr_jaas_file}}\nSOLR_KERB_KEYTAB={{logsearch_solr_web_kerberos_keytab}}\nSOLR_KERB_PRINCIPAL={{logsearch_solr_web_kerberos_principal}}\nSOLR_KERB_NAME_RULES={{logsearch_solr_kerberos_name_rules}}\n\nSOLR_AUTHENTICATION_CLIENT_CONFIGURER=\"org.apache.solr.client.solrj.impl.Krb5HttpClientConfigurer\"\nSOLR_AUTHENTICATION_OPTS=\" -DauthenticationPlugin=org.apache.solr.security.KerberosPlugin -Djava.security.auth.login.config=$SOLR_JAAS_FILE -Dsolr.kerberos.principal=${SOLR_KERB_PRINCIPAL} -Dsolr.kerberos.keytab=${SOLR_KERB_KEYTAB} -Dsolr.kerberos.cookie.domain=${SOLR_HOST} -Dsolr.kerberos.name.rules=${SOLR_KERB_NAME_RULES}\"\n{% endif %}", 
-            "logsearch_solr_pid_dir": "/var/run/ambari-logsearch-solr", 
-            "logsearch_solr_truststore_password": "bigdata", 
-            "logsearch_solr_truststore_type": "jks", 
-            "logsearch_solr_keystore_type": "jks", 
-            "logsearch_solr_log_dir": "/var/log/ambari-logsearch-solr", 
-            "logsearch_solr_web_kerberos_keytab": "/etc/security/keytabs/spnego.service.keytab", 
-            "logsearch_solr_ssl_enabled": "false", 
-            "logsearch_solr_client_log_dir": "/var/log/ambari-logsearch-solr-client", 
-            "logsearch_solr_web_kerberos_principal": "HTTP/_HOST@EXAMPLE.COM", 
-            "logsearch_solr_znode": "/ambari-solr", 
-            "logsearch_solr_keystore_password": "bigdata", 
-            "logsearch_solr_port": "8886", 
-            "logsearch_solr_kerberos_principal": "logsearch-solr/_HOST@EXAMPLE.COM", 
-            "logsearch_solr_jmx_port": "18886", 
-            "logsearch_solr_truststore_location": "/etc/security/serverKeys/logsearch.trustStore.jks", 
-            "logsearch_solr_minmem": "512", 
-            "logsearch_solr_kerberos_keytab": "/etc/security/keytabs/logsearch-solr.service.keytab"
+            "infra_solr_pid_dir": "/var/run/ambari-infra-solr",
+            "infra_solr_truststore_password": "bigdata",
+            "infra_solr_truststore_type": "jks",
+            "infra_solr_keystore_type": "jks",
+            "infra_solr_log_dir": "/var/log/ambari-infra-solr",
+            "infra_solr_web_kerberos_keytab": "/etc/security/keytabs/spnego.service.keytab",
+            "infra_solr_ssl_enabled": "false",
+            "infra_solr_client_log_dir": "/var/log/ambari-infra-solr-client",
+            "infra_solr_web_kerberos_principal": "HTTP/_HOST@EXAMPLE.COM",
+            "infra_solr_znode": "/infra-solr",
+            "infra_solr_keystore_password": "bigdata",
+            "infra_solr_port": "8886",
+            "infra_solr_kerberos_principal": "infra-solr/_HOST@EXAMPLE.COM",
+            "infra_solr_jmx_port": "18886",
+            "infra_solr_truststore_location": "/etc/security/serverKeys/logsearch.trustStore.jks",
+            "infra_solr_minmem": "512",
+            "infra_solr_kerberos_keytab": "/etc/security/keytabs/infra-solr.service.keytab"
         },
-        "logsearch-solr-client-log4j": {
+        "infra-solr-client-log4j": {
+            "infra_solr_client_log_dir" : "/var/log/ambari-infra-solr-client",
             "content": "# Copyright 2011 The Apache Software Foundation\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nlog4j.rootLogger=INFO,file,stdout,stderr\n\nlog4j.appender.file=org.apache.log4j.RollingFileAppender\nlog4j.appender.file.File={{logsearch_solr_client_log}}\nlog4j.appender.file.MaxFileSize=80MB\nlog4j.appender.file.MaxBackupIndex=60\nlog4j.appender.file.layout=org.apache.log4j.PatternLayout\nlog4j.appender.file.layout.ConversionPattern=%d{DATE} %5p [%t] %c{1}:%L - %m%n\n\nlog4j.appender.stdout=org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.Threshold=INFO\nlog4j.appender.stdout.Target=System.out\nlog4j.appender.stdout.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%m%n\n\nlog4j.appender.stderr=org.apache.log4j.ConsoleAppender\nlog4j.appender.stderr.Threshold=ERROR\nlog4j.appender.stderr.Target=System.err\nlog4j.appender.stderr.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stderr.layout.ConversionPattern=%m%n"
         },
         "cluster-env": {
@@ -691,7 +692,7 @@
             "user_group": "hadoop", 
             "stack_tools": "{\n  \"stack_selector\": [\"hdp-select\", \"/usr/bin/hdp-select\", \"hdp-select\"],\n  \"conf_selector\": [\"conf-select\", \"/usr/bin/conf-select\", \"conf-select\"]\n}", 
             "recovery_retry_interval": "5", 
-            "stack_features": "{\n  \"stack_features\": [\n    {\n      \"name\": \"snappy\",\n      \"description\": \"Snappy compressor/decompressor support\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"lzo\",\n      \"description\": \"LZO libraries support\",\n      \"min_version\": \"2.2.1.0\"\n    },\n    {\n      \"name\": \"express_upgrade\",\n      \"description\": \"Express upgrade support\",\n      \"min_version\": \"2.1.0.0\"\n    },\n    {\n      \"name\": \"rolling_upgrade\",\n      \"description\": \"Rolling upgrade support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"config_versioning\",\n      \"description\": \"Configurable versions support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"datanode_non_root\",\n      \"description\": \"DataNode running as non-root support (AMBARI-7615)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"remove_ranger_hdfs_plugin_env\",\n      \"description\": \"HDFS removes Ranger env files (AMBARI-14299)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger\",\n      \"description\": \"Ranger Service support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_tagsync_component\",\n      \"description\": \"Ranger Tagsync component support (AMBARI-14383)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"phoenix\",\n      \"description\": \"Phoenix Service support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"nfs\",\n      \"description\": \"NFS support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"tez_for_spark\",\n      \"description\": \"Tez dependency for Spark\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"timeline_state_store\",\n      \"description\": \"Yarn application timeline-service supports state store property (AMBARI-11442)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"copy_tarball_to_hdfs\",\n      \"description\": \"Copy tarball to HDFS support (AMBARI-12113)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"spark_16plus\",\n      \"description\": \"Spark 1.6+\",\n      \"min_version\": \"2.4.0.0\"\n    },\n    {\n      \"name\": \"spark_thriftserver\",\n      \"description\": \"Spark Thrift Server\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"storm_kerberos\",\n      \"description\": \"Storm Kerberos support (AMBARI-7570)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"storm_ams\",\n      \"description\": \"Storm AMS integration (AMBARI-10710)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"create_kafka_broker_id\",\n      \"description\": \"Ambari should create Kafka Broker Id (AMBARI-12678)\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_listeners\",\n      \"description\": \"Kafka listeners (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_kerberos\",\n      \"description\": \"Kafka Kerberos support (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"pig_on_tez\",\n      \"description\": \"Pig on Tez support (AMBARI-7863)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_non_root\",\n      
\"description\": \"Ranger Usersync as non-root user (AMBARI-10416)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger_audit_db_support\",\n      \"description\": \"Ranger Audit to DB support\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"accumulo_kerberos_user_auth\",\n      \"description\": \"Accumulo Kerberos User Auth (AMBARI-10163)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"knox_versioned_data_dir\",\n      \"description\": \"Use versioned data dir for Knox (AMBARI-13164)\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"knox_sso_topology\",\n      \"description\": \"Knox SSO Topology support (AMBARI-13975)\",\n      \"min_version\": \"2.3.8.0\"\n    },\n    {\n      \"name\": \"atlas_rolling_upgrade\",\n      \"description\": \"Rolling upgrade support for Atlas\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"oozie_admin_user\",\n      \"description\": \"Oozie install user as an Oozie admin user (AMBARI-7976)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_create_hive_tez_configs\",\n      \"description\": \"Oozie create configs for Ambari Hive and Tez deployments (AMBARI-8074)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_setup_shared_lib\",\n      \"description\": \"Oozie setup tools used to shared Oozie lib to HDFS (AMBARI-7240)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_host_kerberos\",\n      \"description\": \"Oozie in secured clusters uses _HOST in Kerberos principal (AMBARI-9775)\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"falcon_extensions\",\n      \"description\": \"Falcon Extension\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_metastore_upgrade_schema\",\n      \"description\": \"Hive metastore upgrade schema support (AMBARI-11176)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server_interactive\",\n      \"description\": \"Hive server interactive support (AMBARI-15573)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_webhcat_specific_configs\",\n      \"description\": \"Hive webhcat specific configurations support (AMBARI-12364)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_purge_table\",\n      \"description\": \"Hive purge table support (AMBARI-12260)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server2_kerberized_env\",\n      \"description\": \"Hive server2 working on kerberized environment (AMBARI-13749)\",\n      \"min_version\": \"2.2.3.0\",\n      \"max_version\": \"2.2.5.0\"\n    },\n    {\n      \"name\": \"hive_env_heapsize\",\n      \"description\": \"Hive heapsize property defined in hive-env (AMBARI-12801)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_hsm_support\",\n      \"description\": \"Ranger KMS HSM support (AMBARI-15752)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_log4j_support\",\n      \"description\": \"Ranger supporting log-4j properties (AMBARI-15681)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kerberos_support\",\n      \"description\": \"Ranger Kerberos support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": 
\"hive_metastore_site_support\",\n      \"description\": \"Hive Metastore site support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_password_jceks\",\n      \"description\": \"Saving Ranger Usersync credentials in jceks\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_install_logsearch_client\",\n      \"description\": \"LogSearch Service support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hbase_home_directory\",\n      \"description\": \"Hbase home directory in HDFS needed for HBASE backup\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"spark_livy\",\n      \"description\": \"Livy as slave component of spark\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"atlas_ranger_plugin_support\",\n      \"description\": \"Atlas Ranger plugin support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_pid_support\",\n      \"description\": \"Ranger Service support pid generation AMBARI-16756\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_pid_support\",\n      \"description\": \"Ranger KMS Service support pid generation\",\n      \"min_version\": \"2.5.0.0\"\n    }\n  ]\n}", 
+            "stack_features": "{\n  \"stack_features\": [\n    {\n      \"name\": \"snappy\",\n      \"description\": \"Snappy compressor/decompressor support\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"lzo\",\n      \"description\": \"LZO libraries support\",\n      \"min_version\": \"2.2.1.0\"\n    },\n    {\n      \"name\": \"express_upgrade\",\n      \"description\": \"Express upgrade support\",\n      \"min_version\": \"2.1.0.0\"\n    },\n    {\n      \"name\": \"rolling_upgrade\",\n      \"description\": \"Rolling upgrade support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"config_versioning\",\n      \"description\": \"Configurable versions support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"datanode_non_root\",\n      \"description\": \"DataNode running as non-root support (AMBARI-7615)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"remove_ranger_hdfs_plugin_env\",\n      \"description\": \"HDFS removes Ranger env files (AMBARI-14299)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger\",\n      \"description\": \"Ranger Service support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_tagsync_component\",\n      \"description\": \"Ranger Tagsync component support (AMBARI-14383)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"phoenix\",\n      \"description\": \"Phoenix Service support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"nfs\",\n      \"description\": \"NFS support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"tez_for_spark\",\n      \"description\": \"Tez dependency for Spark\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"timeline_state_store\",\n      \"description\": \"Yarn application timeline-service supports state store property (AMBARI-11442)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"copy_tarball_to_hdfs\",\n      \"description\": \"Copy tarball to HDFS support (AMBARI-12113)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"spark_16plus\",\n      \"description\": \"Spark 1.6+\",\n      \"min_version\": \"2.4.0.0\"\n    },\n    {\n      \"name\": \"spark_thriftserver\",\n      \"description\": \"Spark Thrift Server\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"storm_kerberos\",\n      \"description\": \"Storm Kerberos support (AMBARI-7570)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"storm_ams\",\n      \"description\": \"Storm AMS integration (AMBARI-10710)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"create_kafka_broker_id\",\n      \"description\": \"Ambari should create Kafka Broker Id (AMBARI-12678)\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_listeners\",\n      \"description\": \"Kafka listeners (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_kerberos\",\n      \"description\": \"Kafka Kerberos support (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"pig_on_tez\",\n      \"description\": \"Pig on Tez support (AMBARI-7863)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_non_root\",\n      
\"description\": \"Ranger Usersync as non-root user (AMBARI-10416)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger_audit_db_support\",\n      \"description\": \"Ranger Audit to DB support\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"accumulo_kerberos_user_auth\",\n      \"description\": \"Accumulo Kerberos User Auth (AMBARI-10163)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"knox_versioned_data_dir\",\n      \"description\": \"Use versioned data dir for Knox (AMBARI-13164)\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"knox_sso_topology\",\n      \"description\": \"Knox SSO Topology support (AMBARI-13975)\",\n      \"min_version\": \"2.3.8.0\"\n    },\n    {\n      \"name\": \"atlas_rolling_upgrade\",\n      \"description\": \"Rolling upgrade support for Atlas\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"oozie_admin_user\",\n      \"description\": \"Oozie install user as an Oozie admin user (AMBARI-7976)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_create_hive_tez_configs\",\n      \"description\": \"Oozie create configs for Ambari Hive and Tez deployments (AMBARI-8074)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_setup_shared_lib\",\n      \"description\": \"Oozie setup tools used to shared Oozie lib to HDFS (AMBARI-7240)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_host_kerberos\",\n      \"description\": \"Oozie in secured clusters uses _HOST in Kerberos principal (AMBARI-9775)\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"falcon_extensions\",\n      \"description\": \"Falcon Extension\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_metastore_upgrade_schema\",\n      \"description\": \"Hive metastore upgrade schema support (AMBARI-11176)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server_interactive\",\n      \"description\": \"Hive server interactive support (AMBARI-15573)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_webhcat_specific_configs\",\n      \"description\": \"Hive webhcat specific configurations support (AMBARI-12364)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_purge_table\",\n      \"description\": \"Hive purge table support (AMBARI-12260)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server2_kerberized_env\",\n      \"description\": \"Hive server2 working on kerberized environment (AMBARI-13749)\",\n      \"min_version\": \"2.2.3.0\",\n      \"max_version\": \"2.2.5.0\"\n    },\n    {\n      \"name\": \"hive_env_heapsize\",\n      \"description\": \"Hive heapsize property defined in hive-env (AMBARI-12801)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_hsm_support\",\n      \"description\": \"Ranger KMS HSM support (AMBARI-15752)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_log4j_support\",\n      \"description\": \"Ranger supporting log-4j properties (AMBARI-15681)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kerberos_support\",\n      \"description\": \"Ranger Kerberos support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": 
\"hive_metastore_site_support\",\n      \"description\": \"Hive Metastore site support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_password_jceks\",\n      \"description\": \"Saving Ranger Usersync credentials in jceks\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_install_infra_client\",\n      \"description\": \"LogSearch Service support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hbase_home_directory\",\n      \"description\": \"Hbase home directory in HDFS needed for HBASE backup\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"spark_livy\",\n      \"description\": \"Livy as slave component of spark\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"atlas_ranger_plugin_support\",\n      \"description\": \"Atlas Ranger plugin support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_pid_support\",\n      \"description\": \"Ranger Service support pid generation AMBARI-16756\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_pid_support\",\n      \"description\": \"Ranger KMS Service support pid generation\",\n      \"min_version\": \"2.5.0.0\"\n    }\n  ]\n}",
             "recovery_enabled": "true", 
             "recovery_max_count": "6", 
             "stack_root": "/usr/hdp", 
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
index e8c8199..9144f12 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
@@ -54,8 +54,8 @@
         "ranger-hdfs-security": {}, 
         "usersync-properties": {}, 
         "zookeeper-env": {},
-        "logsearch-solr-env": {},
-        "logsearch-solr-client-log4j": {},
+        "infra-solr-env": {},
+        "infra-solr-client-log4j": {},
         "cluster-env": {}
     }, 
     "public_hostname": "c6401.ambari.apache.org", 
@@ -157,10 +157,10 @@
         "zookeeper-env": {
             "tag": "version1467016680492"
         },
-        "logsearch-solr-env": {
+        "infra-solr-env": {
             "tag": "version1467098537360"
         },
-        "logsearch-solr-client-log4j": {
+        "infra-solr-client-log4j": {
             "tag": "version1467096917836"
         },
         "cluster-env": {
@@ -255,7 +255,7 @@
         "zookeeper_hosts": [
             "c6401.ambari.apache.org"
         ],
-        "logsearch_solr_hosts": [
+        "infra_solr_hosts": [
             "c6401.ambari.apache.org"
         ]
     }, 
@@ -712,32 +712,33 @@
             "zk_pid_dir": "/var/run/zookeeper", 
             "zookeeper_principal_name": "zookeeper/_HOST@EXAMPLE.COM"
         },
-        "logsearch-solr-env": {
-            "logsearch_solr_datadir": "/opt/logsearch_solr/data", 
-            "logsearch_solr_keystore_location": "/etc/security/serverKeys/logsearch.keyStore.jks", 
-            "logsearch_solr_kerberos_name_rules": "DEFAULT", 
-            "logsearch_solr_user": "logsearch-solr", 
-            "logsearch_solr_maxmem": "1024", 
+        "infra-solr-env": {
+            "infra_solr_datadir": "/opt/logsearch_solr/data",
+            "infra_solr_keystore_location": "/etc/security/serverKeys/logsearch.keyStore.jks",
+            "infra_solr_kerberos_name_rules": "DEFAULT",
+            "infra_solr_user": "infra-solr",
+            "infra_solr_maxmem": "1024",
             "content": "#!/bin/bash\n# Licensed to the Apache Software Foundation (ASF) under one or more\n# contributor license agreements. See the NOTICE file distributed with\n# this work for additional information regarding copyright ownership.\n# The ASF licenses this file to You under the Apache License, Version 2.0\n# (the \"License\"); you may not use this file except in compliance with\n# the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# By default the script will use JAVA_HOME to determine which java\n# to use, but you can set a specific path for Solr to use without\n# affecting other Java applications on your server/workstation.\nSOLR_JAVA_HOME={{java64_home}}\n\n# Increase Java Min/Max Heap as needed to support your indexing / query needs\nSOLR_JAVA_MEM=\"-Xms{{logsearch_solr_min_mem}}m -Xmx{{logsearch_solr_max_mem}}m\"\n\n# Enable verbose GC logging\nGC_LOG_OPTS=\"-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \\\n-XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime\"\n\n# These GC settings have shown to work well for a number of common Solr workloads\nGC_TUNE=\"-XX:NewRatio=3 \\\n-XX:SurvivorRatio=4 \\\n-XX:TargetSurvivorRatio=90 \\\n-XX:MaxTenuringThreshold=8 \\\n-XX:+UseConcMarkSweepGC \\\n-XX:+UseParNewGC \\\n-XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 \\\n-XX:+CMSScavengeBeforeRemark \\\n-XX:PretenureSizeThreshold=64m \\\n-XX:+UseCMSInitiatingOccupancyOnly \\\n-XX:CMSInitiatingOccupancyFraction=50 \\\n-XX:CMSMaxAbortablePrecleanTime=6000 \\\n-XX:+CMSParallelRemarkEnabled \\\n-XX:+ParallelRefProcEnabled\"\n\n# Set the ZooKeeper connection string if using an external ZooKeeper ensemble\n# e.g. host1:2181,host2:2181/chroot\n# Leave empty if not using SolrCloud\nZK_HOST=\"{{zookeeper_quorum}}{{logsearch_solr_znode}}\"\n\n# Set the ZooKeeper client timeout (for SolrCloud mode)\nZK_CLIENT_TIMEOUT=\"60000\"\n\n# By default the start script uses \"localhost\"; override the hostname here\n# for production SolrCloud environments to control the hostname exposed to cluster state\n#SOLR_HOST=\"192.168.1.1\"\n\n# By default the start script uses UTC; override the timezone if needed\n#SOLR_TIMEZONE=\"UTC\"\n\n# Set to true to activate the JMX RMI connector to allow remote JMX client applications\n# to monitor the JVM hosting Solr; set to \"false\" to disable that behavior\n# (false is recommended in production environments)\nENABLE_REMOTE_JMX_OPTS=\"true\"\n\n# The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here\nRMI_PORT={{logsearch_solr_jmx_port}}\n\n# Anything you add to the SOLR_OPTS variable will be included in the java\n# start command line as-is, in ADDITION to other options. If you specify the\n# -a option on start script, those options will be appended as well. 
Examples:\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.autoSoftCommit.maxTime=3000\"\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.autoCommit.maxTime=60000\"\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.clustering.enabled=true\"\n\n# Location where the bin/solr script will save PID files for running instances\n# If not set, the script will create PID files in $SOLR_TIP/bin\nSOLR_PID_DIR={{logsearch_solr_piddir}}\n\n# Path to a directory where Solr creates index files, the specified directory\n# must contain a solr.xml; by default, Solr will use server/solr\nSOLR_HOME={{logsearch_solr_datadir}}\n\n# Solr provides a default Log4J configuration properties file in server/resources\n# however, you may want to customize the log settings and file appender location\n# so you can point the script to use a different log4j.properties file\nLOG4J_PROPS={{logsearch_solr_conf}}/log4j.properties\n\n# Location where Solr should write logs to; should agree with the file appender\n# settings in server/resources/log4j.properties\nSOLR_LOGS_DIR={{logsearch_solr_log_dir}}\n\n# Sets the port Solr binds to, default is 8983\nSOLR_PORT={{logsearch_solr_port}}\n\n# Be sure to update the paths to the correct keystore for your environment\n{% if logsearch_solr_ssl_enabled %}\nSOLR_SSL_KEY_STORE={{logsearch_solr_keystore_location}}\nSOLR_SSL_KEY_STORE_PASSWORD={{logsearch_solr_keystore_password}}\nSOLR_SSL_TRUST_STORE={{logsearch_solr_keystore_location}}\nSOLR_SSL_TRUST_STORE_PASSWORD={{logsearch_solr_keystore_password}}\nSOLR_SSL_NEED_CLIENT_AUTH=false\nSOLR_SSL_WANT_CLIENT_AUTH=false\n{% endif %}\n\n# Uncomment to set a specific SSL port (-Djetty.ssl.port=N); if not set\n# and you are using SSL, then the start script will use SOLR_PORT for the SSL port\n#SOLR_SSL_PORT=\n\n{% if security_enabled -%}\nSOLR_HOST=`hostname -f`\nSOLR_JAAS_FILE={{logsearch_solr_jaas_file}}\nSOLR_KERB_KEYTAB={{logsearch_solr_web_kerberos_keytab}}\nSOLR_KERB_PRINCIPAL={{logsearch_solr_web_kerberos_principal}}\nSOLR_KERB_NAME_RULES={{logsearch_solr_kerberos_name_rules}}\n\nSOLR_AUTHENTICATION_CLIENT_CONFIGURER=\"org.apache.solr.client.solrj.impl.Krb5HttpClientConfigurer\"\nSOLR_AUTHENTICATION_OPTS=\" -DauthenticationPlugin=org.apache.solr.security.KerberosPlugin -Djava.security.auth.login.config=$SOLR_JAAS_FILE -Dsolr.kerberos.principal=${SOLR_KERB_PRINCIPAL} -Dsolr.kerberos.keytab=${SOLR_KERB_KEYTAB} -Dsolr.kerberos.cookie.domain=${SOLR_HOST} -Dsolr.kerberos.name.rules=${SOLR_KERB_NAME_RULES}\"\n{% endif %}", 
-            "logsearch_solr_pid_dir": "/var/run/ambari-logsearch-solr", 
-            "logsearch_solr_truststore_password": "bigdata", 
-            "logsearch_solr_truststore_type": "jks", 
-            "logsearch_solr_keystore_type": "jks", 
-            "logsearch_solr_log_dir": "/var/log/ambari-logsearch-solr", 
-            "logsearch_solr_web_kerberos_keytab": "/etc/security/keytabs/spnego.service.keytab", 
-            "logsearch_solr_ssl_enabled": "false", 
-            "logsearch_solr_client_log_dir": "/var/log/ambari-logsearch-solr-client", 
-            "logsearch_solr_web_kerberos_principal": "HTTP/_HOST@EXAMPLE.COM", 
-            "logsearch_solr_znode": "/ambari-solr", 
-            "logsearch_solr_keystore_password": "bigdata", 
-            "logsearch_solr_port": "8886", 
-            "logsearch_solr_kerberos_principal": "logsearch-solr/_HOST@EXAMPLE.COM", 
-            "logsearch_solr_jmx_port": "18886", 
-            "logsearch_solr_truststore_location": "/etc/security/serverKeys/logsearch.trustStore.jks", 
-            "logsearch_solr_minmem": "512", 
-            "logsearch_solr_kerberos_keytab": "/etc/security/keytabs/logsearch-solr.service.keytab"
+            "infra_solr_pid_dir": "/var/run/ambari-infra-solr",
+            "infra_solr_truststore_password": "bigdata",
+            "infra_solr_truststore_type": "jks",
+            "infra_solr_keystore_type": "jks",
+            "infra_solr_log_dir": "/var/log/ambari-infra-solr",
+            "infra_solr_web_kerberos_keytab": "/etc/security/keytabs/spnego.service.keytab",
+            "infra_solr_ssl_enabled": "false",
+            "infra_solr_client_log_dir": "/var/log/ambari-infra-solr-client",
+            "infra_solr_web_kerberos_principal": "HTTP/_HOST@EXAMPLE.COM",
+            "infra_solr_znode": "/ambari-solr",
+            "infra_solr_keystore_password": "bigdata",
+            "infra_solr_port": "8886",
+            "infra_solr_kerberos_principal": "infra-solr/_HOST@EXAMPLE.COM",
+            "infra_solr_jmx_port": "18886",
+            "infra_solr_truststore_location": "/etc/security/serverKeys/infra-solr.trustStore.jks",
+            "infra_solr_minmem": "512",
+            "infra_solr_kerberos_keytab": "/etc/security/keytabs/infra-solr.service.keytab"
         },
-        "logsearch-solr-client-log4j": {
+        "infra-solr-client-log4j": {
+            "infra_solr_client_log_dir" : "/var/log/ambari-infra-solr-client",
             "content": "# Copyright 2011 The Apache Software Foundation\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nlog4j.rootLogger=INFO,file,stdout,stderr\n\nlog4j.appender.file=org.apache.log4j.RollingFileAppender\nlog4j.appender.file.File={{logsearch_solr_client_log}}\nlog4j.appender.file.MaxFileSize=80MB\nlog4j.appender.file.MaxBackupIndex=60\nlog4j.appender.file.layout=org.apache.log4j.PatternLayout\nlog4j.appender.file.layout.ConversionPattern=%d{DATE} %5p [%t] %c{1}:%L - %m%n\n\nlog4j.appender.stdout=org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.Threshold=INFO\nlog4j.appender.stdout.Target=System.out\nlog4j.appender.stdout.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%m%n\n\nlog4j.appender.stderr=org.apache.log4j.ConsoleAppender\nlog4j.appender.stderr.Threshold=ERROR\nlog4j.appender.stderr.Target=System.err\nlog4j.appender.stderr.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stderr.layout.ConversionPattern=%m%n"
         },
         "cluster-env": {
@@ -758,7 +759,7 @@
             "user_group": "hadoop", 
             "stack_tools": "{\n  \"stack_selector\": [\"hdp-select\", \"/usr/bin/hdp-select\", \"hdp-select\"],\n  \"conf_selector\": [\"conf-select\", \"/usr/bin/conf-select\", \"conf-select\"]\n}", 
             "recovery_retry_interval": "5", 
-            "stack_features": "{\n  \"stack_features\": [\n    {\n      \"name\": \"snappy\",\n      \"description\": \"Snappy compressor/decompressor support\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"lzo\",\n      \"description\": \"LZO libraries support\",\n      \"min_version\": \"2.2.1.0\"\n    },\n    {\n      \"name\": \"express_upgrade\",\n      \"description\": \"Express upgrade support\",\n      \"min_version\": \"2.1.0.0\"\n    },\n    {\n      \"name\": \"rolling_upgrade\",\n      \"description\": \"Rolling upgrade support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"config_versioning\",\n      \"description\": \"Configurable versions support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"datanode_non_root\",\n      \"description\": \"DataNode running as non-root support (AMBARI-7615)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"remove_ranger_hdfs_plugin_env\",\n      \"description\": \"HDFS removes Ranger env files (AMBARI-14299)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger\",\n      \"description\": \"Ranger Service support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_tagsync_component\",\n      \"description\": \"Ranger Tagsync component support (AMBARI-14383)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"phoenix\",\n      \"description\": \"Phoenix Service support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"nfs\",\n      \"description\": \"NFS support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"tez_for_spark\",\n      \"description\": \"Tez dependency for Spark\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"timeline_state_store\",\n      \"description\": \"Yarn application timeline-service supports state store property (AMBARI-11442)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"copy_tarball_to_hdfs\",\n      \"description\": \"Copy tarball to HDFS support (AMBARI-12113)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"spark_16plus\",\n      \"description\": \"Spark 1.6+\",\n      \"min_version\": \"2.4.0.0\"\n    },\n    {\n      \"name\": \"spark_thriftserver\",\n      \"description\": \"Spark Thrift Server\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"storm_kerberos\",\n      \"description\": \"Storm Kerberos support (AMBARI-7570)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"storm_ams\",\n      \"description\": \"Storm AMS integration (AMBARI-10710)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"create_kafka_broker_id\",\n      \"description\": \"Ambari should create Kafka Broker Id (AMBARI-12678)\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_listeners\",\n      \"description\": \"Kafka listeners (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_kerberos\",\n      \"description\": \"Kafka Kerberos support (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"pig_on_tez\",\n      \"description\": \"Pig on Tez support (AMBARI-7863)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_non_root\",\n      
\"description\": \"Ranger Usersync as non-root user (AMBARI-10416)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger_audit_db_support\",\n      \"description\": \"Ranger Audit to DB support\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"accumulo_kerberos_user_auth\",\n      \"description\": \"Accumulo Kerberos User Auth (AMBARI-10163)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"knox_versioned_data_dir\",\n      \"description\": \"Use versioned data dir for Knox (AMBARI-13164)\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"knox_sso_topology\",\n      \"description\": \"Knox SSO Topology support (AMBARI-13975)\",\n      \"min_version\": \"2.3.8.0\"\n    },\n    {\n      \"name\": \"atlas_rolling_upgrade\",\n      \"description\": \"Rolling upgrade support for Atlas\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"oozie_admin_user\",\n      \"description\": \"Oozie install user as an Oozie admin user (AMBARI-7976)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_create_hive_tez_configs\",\n      \"description\": \"Oozie create configs for Ambari Hive and Tez deployments (AMBARI-8074)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_setup_shared_lib\",\n      \"description\": \"Oozie setup tools used to shared Oozie lib to HDFS (AMBARI-7240)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_host_kerberos\",\n      \"description\": \"Oozie in secured clusters uses _HOST in Kerberos principal (AMBARI-9775)\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"falcon_extensions\",\n      \"description\": \"Falcon Extension\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_metastore_upgrade_schema\",\n      \"description\": \"Hive metastore upgrade schema support (AMBARI-11176)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server_interactive\",\n      \"description\": \"Hive server interactive support (AMBARI-15573)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_webhcat_specific_configs\",\n      \"description\": \"Hive webhcat specific configurations support (AMBARI-12364)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_purge_table\",\n      \"description\": \"Hive purge table support (AMBARI-12260)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server2_kerberized_env\",\n      \"description\": \"Hive server2 working on kerberized environment (AMBARI-13749)\",\n      \"min_version\": \"2.2.3.0\",\n      \"max_version\": \"2.2.5.0\"\n    },\n    {\n      \"name\": \"hive_env_heapsize\",\n      \"description\": \"Hive heapsize property defined in hive-env (AMBARI-12801)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_hsm_support\",\n      \"description\": \"Ranger KMS HSM support (AMBARI-15752)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_log4j_support\",\n      \"description\": \"Ranger supporting log-4j properties (AMBARI-15681)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kerberos_support\",\n      \"description\": \"Ranger Kerberos support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": 
\"hive_metastore_site_support\",\n      \"description\": \"Hive Metastore site support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_password_jceks\",\n      \"description\": \"Saving Ranger Usersync credentials in jceks\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_install_logsearch_client\",\n      \"description\": \"LogSearch Service support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hbase_home_directory\",\n      \"description\": \"Hbase home directory in HDFS needed for HBASE backup\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"spark_livy\",\n      \"description\": \"Livy as slave component of spark\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"atlas_ranger_plugin_support\",\n      \"description\": \"Atlas Ranger plugin support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_pid_support\",\n      \"description\": \"Ranger Service support pid generation AMBARI-16756\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_pid_support\",\n      \"description\": \"Ranger KMS Service support pid generation\",\n      \"min_version\": \"2.5.0.0\"\n    }\n  ]\n}", 
+            "stack_features": "{\n  \"stack_features\": [\n    {\n      \"name\": \"snappy\",\n      \"description\": \"Snappy compressor/decompressor support\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"lzo\",\n      \"description\": \"LZO libraries support\",\n      \"min_version\": \"2.2.1.0\"\n    },\n    {\n      \"name\": \"express_upgrade\",\n      \"description\": \"Express upgrade support\",\n      \"min_version\": \"2.1.0.0\"\n    },\n    {\n      \"name\": \"rolling_upgrade\",\n      \"description\": \"Rolling upgrade support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"config_versioning\",\n      \"description\": \"Configurable versions support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"datanode_non_root\",\n      \"description\": \"DataNode running as non-root support (AMBARI-7615)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"remove_ranger_hdfs_plugin_env\",\n      \"description\": \"HDFS removes Ranger env files (AMBARI-14299)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger\",\n      \"description\": \"Ranger Service support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_tagsync_component\",\n      \"description\": \"Ranger Tagsync component support (AMBARI-14383)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"phoenix\",\n      \"description\": \"Phoenix Service support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"nfs\",\n      \"description\": \"NFS support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"tez_for_spark\",\n      \"description\": \"Tez dependency for Spark\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"timeline_state_store\",\n      \"description\": \"Yarn application timeline-service supports state store property (AMBARI-11442)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"copy_tarball_to_hdfs\",\n      \"description\": \"Copy tarball to HDFS support (AMBARI-12113)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"spark_16plus\",\n      \"description\": \"Spark 1.6+\",\n      \"min_version\": \"2.4.0.0\"\n    },\n    {\n      \"name\": \"spark_thriftserver\",\n      \"description\": \"Spark Thrift Server\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"storm_kerberos\",\n      \"description\": \"Storm Kerberos support (AMBARI-7570)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"storm_ams\",\n      \"description\": \"Storm AMS integration (AMBARI-10710)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"create_kafka_broker_id\",\n      \"description\": \"Ambari should create Kafka Broker Id (AMBARI-12678)\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_listeners\",\n      \"description\": \"Kafka listeners (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_kerberos\",\n      \"description\": \"Kafka Kerberos support (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"pig_on_tez\",\n      \"description\": \"Pig on Tez support (AMBARI-7863)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_non_root\",\n      
\"description\": \"Ranger Usersync as non-root user (AMBARI-10416)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger_audit_db_support\",\n      \"description\": \"Ranger Audit to DB support\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"accumulo_kerberos_user_auth\",\n      \"description\": \"Accumulo Kerberos User Auth (AMBARI-10163)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"knox_versioned_data_dir\",\n      \"description\": \"Use versioned data dir for Knox (AMBARI-13164)\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"knox_sso_topology\",\n      \"description\": \"Knox SSO Topology support (AMBARI-13975)\",\n      \"min_version\": \"2.3.8.0\"\n    },\n    {\n      \"name\": \"atlas_rolling_upgrade\",\n      \"description\": \"Rolling upgrade support for Atlas\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"oozie_admin_user\",\n      \"description\": \"Oozie install user as an Oozie admin user (AMBARI-7976)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_create_hive_tez_configs\",\n      \"description\": \"Oozie create configs for Ambari Hive and Tez deployments (AMBARI-8074)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_setup_shared_lib\",\n      \"description\": \"Oozie setup tools used to shared Oozie lib to HDFS (AMBARI-7240)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_host_kerberos\",\n      \"description\": \"Oozie in secured clusters uses _HOST in Kerberos principal (AMBARI-9775)\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"falcon_extensions\",\n      \"description\": \"Falcon Extension\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_metastore_upgrade_schema\",\n      \"description\": \"Hive metastore upgrade schema support (AMBARI-11176)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server_interactive\",\n      \"description\": \"Hive server interactive support (AMBARI-15573)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_webhcat_specific_configs\",\n      \"description\": \"Hive webhcat specific configurations support (AMBARI-12364)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_purge_table\",\n      \"description\": \"Hive purge table support (AMBARI-12260)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server2_kerberized_env\",\n      \"description\": \"Hive server2 working on kerberized environment (AMBARI-13749)\",\n      \"min_version\": \"2.2.3.0\",\n      \"max_version\": \"2.2.5.0\"\n    },\n    {\n      \"name\": \"hive_env_heapsize\",\n      \"description\": \"Hive heapsize property defined in hive-env (AMBARI-12801)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_hsm_support\",\n      \"description\": \"Ranger KMS HSM support (AMBARI-15752)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_log4j_support\",\n      \"description\": \"Ranger supporting log-4j properties (AMBARI-15681)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kerberos_support\",\n      \"description\": \"Ranger Kerberos support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": 
\"hive_metastore_site_support\",\n      \"description\": \"Hive Metastore site support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_password_jceks\",\n      \"description\": \"Saving Ranger Usersync credentials in jceks\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_install_infra_client\",\n      \"description\": \"LogSearch Service support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hbase_home_directory\",\n      \"description\": \"Hbase home directory in HDFS needed for HBASE backup\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"spark_livy\",\n      \"description\": \"Livy as slave component of spark\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"atlas_ranger_plugin_support\",\n      \"description\": \"Atlas Ranger plugin support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_pid_support\",\n      \"description\": \"Ranger Service support pid generation AMBARI-16756\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_pid_support\",\n      \"description\": \"Ranger KMS Service support pid generation\",\n      \"min_version\": \"2.5.0.0\"\n    }\n  ]\n}",
             "recovery_enabled": "true", 
             "smokeuser_principal_name": "ambari-qa-test_cluster01@EXAMPLE.COM", 
             "recovery_max_count": "6", 
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json
index 5f7185c..05cb78a 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json
@@ -689,7 +689,7 @@
             "user_group": "hadoop", 
             "stack_tools": "{\n  \"stack_selector\": [\"hdp-select\", \"/usr/bin/hdp-select\", \"hdp-select\"],\n  \"conf_selector\": [\"conf-select\", \"/usr/bin/conf-select\", \"conf-select\"]\n}", 
             "recovery_retry_interval": "5", 
-            "stack_features": "{\n  \"stack_features\": [\n    {\n      \"name\": \"snappy\",\n      \"description\": \"Snappy compressor/decompressor support\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"lzo\",\n      \"description\": \"LZO libraries support\",\n      \"min_version\": \"2.2.1.0\"\n    },\n    {\n      \"name\": \"express_upgrade\",\n      \"description\": \"Express upgrade support\",\n      \"min_version\": \"2.1.0.0\"\n    },\n    {\n      \"name\": \"rolling_upgrade\",\n      \"description\": \"Rolling upgrade support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"config_versioning\",\n      \"description\": \"Configurable versions support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"datanode_non_root\",\n      \"description\": \"DataNode running as non-root support (AMBARI-7615)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"remove_ranger_hdfs_plugin_env\",\n      \"description\": \"HDFS removes Ranger env files (AMBARI-14299)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger\",\n      \"description\": \"Ranger Service support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_tagsync_component\",\n      \"description\": \"Ranger Tagsync component support (AMBARI-14383)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"phoenix\",\n      \"description\": \"Phoenix Service support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"nfs\",\n      \"description\": \"NFS support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"tez_for_spark\",\n      \"description\": \"Tez dependency for Spark\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"timeline_state_store\",\n      \"description\": \"Yarn application timeline-service supports state store property (AMBARI-11442)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"copy_tarball_to_hdfs\",\n      \"description\": \"Copy tarball to HDFS support (AMBARI-12113)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"spark_16plus\",\n      \"description\": \"Spark 1.6+\",\n      \"min_version\": \"2.4.0.0\"\n    },\n    {\n      \"name\": \"spark_thriftserver\",\n      \"description\": \"Spark Thrift Server\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"storm_kerberos\",\n      \"description\": \"Storm Kerberos support (AMBARI-7570)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"storm_ams\",\n      \"description\": \"Storm AMS integration (AMBARI-10710)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"create_kafka_broker_id\",\n      \"description\": \"Ambari should create Kafka Broker Id (AMBARI-12678)\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_listeners\",\n      \"description\": \"Kafka listeners (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_kerberos\",\n      \"description\": \"Kafka Kerberos support (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"pig_on_tez\",\n      \"description\": \"Pig on Tez support (AMBARI-7863)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_non_root\",\n      
\"description\": \"Ranger Usersync as non-root user (AMBARI-10416)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger_audit_db_support\",\n      \"description\": \"Ranger Audit to DB support\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"accumulo_kerberos_user_auth\",\n      \"description\": \"Accumulo Kerberos User Auth (AMBARI-10163)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"knox_versioned_data_dir\",\n      \"description\": \"Use versioned data dir for Knox (AMBARI-13164)\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"knox_sso_topology\",\n      \"description\": \"Knox SSO Topology support (AMBARI-13975)\",\n      \"min_version\": \"2.3.8.0\"\n    },\n    {\n      \"name\": \"atlas_rolling_upgrade\",\n      \"description\": \"Rolling upgrade support for Atlas\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"oozie_admin_user\",\n      \"description\": \"Oozie install user as an Oozie admin user (AMBARI-7976)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_create_hive_tez_configs\",\n      \"description\": \"Oozie create configs for Ambari Hive and Tez deployments (AMBARI-8074)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_setup_shared_lib\",\n      \"description\": \"Oozie setup tools used to shared Oozie lib to HDFS (AMBARI-7240)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_host_kerberos\",\n      \"description\": \"Oozie in secured clusters uses _HOST in Kerberos principal (AMBARI-9775)\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"falcon_extensions\",\n      \"description\": \"Falcon Extension\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_metastore_upgrade_schema\",\n      \"description\": \"Hive metastore upgrade schema support (AMBARI-11176)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server_interactive\",\n      \"description\": \"Hive server interactive support (AMBARI-15573)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_webhcat_specific_configs\",\n      \"description\": \"Hive webhcat specific configurations support (AMBARI-12364)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_purge_table\",\n      \"description\": \"Hive purge table support (AMBARI-12260)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server2_kerberized_env\",\n      \"description\": \"Hive server2 working on kerberized environment (AMBARI-13749)\",\n      \"min_version\": \"2.2.3.0\",\n      \"max_version\": \"2.2.5.0\"\n    },\n    {\n      \"name\": \"hive_env_heapsize\",\n      \"description\": \"Hive heapsize property defined in hive-env (AMBARI-12801)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_hsm_support\",\n      \"description\": \"Ranger KMS HSM support (AMBARI-15752)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_log4j_support\",\n      \"description\": \"Ranger supporting log-4j properties (AMBARI-15681)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kerberos_support\",\n      \"description\": \"Ranger Kerberos support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": 
\"hive_metastore_site_support\",\n      \"description\": \"Hive Metastore site support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_password_jceks\",\n      \"description\": \"Saving Ranger Usersync credentials in jceks\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_install_logsearch_client\",\n      \"description\": \"LogSearch Service support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hbase_home_directory\",\n      \"description\": \"Hbase home directory in HDFS needed for HBASE backup\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"spark_livy\",\n      \"description\": \"Livy as slave component of spark\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"atlas_ranger_plugin_support\",\n      \"description\": \"Atlas Ranger plugin support\",\n      \"min_version\": \"2.5.0.0\"\n    }\n  ]\n}", 
+            "stack_features": "{\n  \"stack_features\": [\n    {\n      \"name\": \"snappy\",\n      \"description\": \"Snappy compressor/decompressor support\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"lzo\",\n      \"description\": \"LZO libraries support\",\n      \"min_version\": \"2.2.1.0\"\n    },\n    {\n      \"name\": \"express_upgrade\",\n      \"description\": \"Express upgrade support\",\n      \"min_version\": \"2.1.0.0\"\n    },\n    {\n      \"name\": \"rolling_upgrade\",\n      \"description\": \"Rolling upgrade support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"config_versioning\",\n      \"description\": \"Configurable versions support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"datanode_non_root\",\n      \"description\": \"DataNode running as non-root support (AMBARI-7615)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"remove_ranger_hdfs_plugin_env\",\n      \"description\": \"HDFS removes Ranger env files (AMBARI-14299)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger\",\n      \"description\": \"Ranger Service support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_tagsync_component\",\n      \"description\": \"Ranger Tagsync component support (AMBARI-14383)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"phoenix\",\n      \"description\": \"Phoenix Service support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"nfs\",\n      \"description\": \"NFS support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"tez_for_spark\",\n      \"description\": \"Tez dependency for Spark\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"timeline_state_store\",\n      \"description\": \"Yarn application timeline-service supports state store property (AMBARI-11442)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"copy_tarball_to_hdfs\",\n      \"description\": \"Copy tarball to HDFS support (AMBARI-12113)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"spark_16plus\",\n      \"description\": \"Spark 1.6+\",\n      \"min_version\": \"2.4.0.0\"\n    },\n    {\n      \"name\": \"spark_thriftserver\",\n      \"description\": \"Spark Thrift Server\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"storm_kerberos\",\n      \"description\": \"Storm Kerberos support (AMBARI-7570)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"storm_ams\",\n      \"description\": \"Storm AMS integration (AMBARI-10710)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"create_kafka_broker_id\",\n      \"description\": \"Ambari should create Kafka Broker Id (AMBARI-12678)\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_listeners\",\n      \"description\": \"Kafka listeners (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_kerberos\",\n      \"description\": \"Kafka Kerberos support (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"pig_on_tez\",\n      \"description\": \"Pig on Tez support (AMBARI-7863)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_non_root\",\n      
\"description\": \"Ranger Usersync as non-root user (AMBARI-10416)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger_audit_db_support\",\n      \"description\": \"Ranger Audit to DB support\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"accumulo_kerberos_user_auth\",\n      \"description\": \"Accumulo Kerberos User Auth (AMBARI-10163)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"knox_versioned_data_dir\",\n      \"description\": \"Use versioned data dir for Knox (AMBARI-13164)\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"knox_sso_topology\",\n      \"description\": \"Knox SSO Topology support (AMBARI-13975)\",\n      \"min_version\": \"2.3.8.0\"\n    },\n    {\n      \"name\": \"atlas_rolling_upgrade\",\n      \"description\": \"Rolling upgrade support for Atlas\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"oozie_admin_user\",\n      \"description\": \"Oozie install user as an Oozie admin user (AMBARI-7976)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_create_hive_tez_configs\",\n      \"description\": \"Oozie create configs for Ambari Hive and Tez deployments (AMBARI-8074)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_setup_shared_lib\",\n      \"description\": \"Oozie setup tools used to shared Oozie lib to HDFS (AMBARI-7240)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_host_kerberos\",\n      \"description\": \"Oozie in secured clusters uses _HOST in Kerberos principal (AMBARI-9775)\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"falcon_extensions\",\n      \"description\": \"Falcon Extension\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_metastore_upgrade_schema\",\n      \"description\": \"Hive metastore upgrade schema support (AMBARI-11176)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server_interactive\",\n      \"description\": \"Hive server interactive support (AMBARI-15573)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_webhcat_specific_configs\",\n      \"description\": \"Hive webhcat specific configurations support (AMBARI-12364)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_purge_table\",\n      \"description\": \"Hive purge table support (AMBARI-12260)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server2_kerberized_env\",\n      \"description\": \"Hive server2 working on kerberized environment (AMBARI-13749)\",\n      \"min_version\": \"2.2.3.0\",\n      \"max_version\": \"2.2.5.0\"\n    },\n    {\n      \"name\": \"hive_env_heapsize\",\n      \"description\": \"Hive heapsize property defined in hive-env (AMBARI-12801)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_hsm_support\",\n      \"description\": \"Ranger KMS HSM support (AMBARI-15752)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_log4j_support\",\n      \"description\": \"Ranger supporting log-4j properties (AMBARI-15681)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kerberos_support\",\n      \"description\": \"Ranger Kerberos support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": 
\"hive_metastore_site_support\",\n      \"description\": \"Hive Metastore site support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_password_jceks\",\n      \"description\": \"Saving Ranger Usersync credentials in jceks\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_install_infra_client\",\n      \"description\": \"LogSearch Service support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hbase_home_directory\",\n      \"description\": \"Hbase home directory in HDFS needed for HBASE backup\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"spark_livy\",\n      \"description\": \"Livy as slave component of spark\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"atlas_ranger_plugin_support\",\n      \"description\": \"Atlas Ranger plugin support\",\n      \"min_version\": \"2.5.0.0\"\n    }\n  ]\n}",
             "recovery_enabled": "true", 
             "recovery_max_count": "6", 
             "stack_root": "/usr/hdp", 
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
index acc5c21..7054e8f 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
@@ -859,7 +859,7 @@
             "user_group": "hadoop", 
             "stack_tools": "{\n  \"stack_selector\": [\"hdp-select\", \"/usr/bin/hdp-select\", \"hdp-select\"],\n  \"conf_selector\": [\"conf-select\", \"/usr/bin/conf-select\", \"conf-select\"]\n}", 
             "recovery_retry_interval": "5", 
-            "stack_features": "{\n  \"stack_features\": [\n    {\n      \"name\": \"snappy\",\n      \"description\": \"Snappy compressor/decompressor support\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"lzo\",\n      \"description\": \"LZO libraries support\",\n      \"min_version\": \"2.2.1.0\"\n    },\n    {\n      \"name\": \"express_upgrade\",\n      \"description\": \"Express upgrade support\",\n      \"min_version\": \"2.1.0.0\"\n    },\n    {\n      \"name\": \"rolling_upgrade\",\n      \"description\": \"Rolling upgrade support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"config_versioning\",\n      \"description\": \"Configurable versions support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"datanode_non_root\",\n      \"description\": \"DataNode running as non-root support (AMBARI-7615)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"remove_ranger_hdfs_plugin_env\",\n      \"description\": \"HDFS removes Ranger env files (AMBARI-14299)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger\",\n      \"description\": \"Ranger Service support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_tagsync_component\",\n      \"description\": \"Ranger Tagsync component support (AMBARI-14383)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"phoenix\",\n      \"description\": \"Phoenix Service support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"nfs\",\n      \"description\": \"NFS support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"tez_for_spark\",\n      \"description\": \"Tez dependency for Spark\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"timeline_state_store\",\n      \"description\": \"Yarn application timeline-service supports state store property (AMBARI-11442)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"copy_tarball_to_hdfs\",\n      \"description\": \"Copy tarball to HDFS support (AMBARI-12113)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"spark_16plus\",\n      \"description\": \"Spark 1.6+\",\n      \"min_version\": \"2.4.0.0\"\n    },\n    {\n      \"name\": \"spark_thriftserver\",\n      \"description\": \"Spark Thrift Server\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"storm_kerberos\",\n      \"description\": \"Storm Kerberos support (AMBARI-7570)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"storm_ams\",\n      \"description\": \"Storm AMS integration (AMBARI-10710)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"create_kafka_broker_id\",\n      \"description\": \"Ambari should create Kafka Broker Id (AMBARI-12678)\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_listeners\",\n      \"description\": \"Kafka listeners (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_kerberos\",\n      \"description\": \"Kafka Kerberos support (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"pig_on_tez\",\n      \"description\": \"Pig on Tez support (AMBARI-7863)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_non_root\",\n      
\"description\": \"Ranger Usersync as non-root user (AMBARI-10416)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger_audit_db_support\",\n      \"description\": \"Ranger Audit to DB support\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"accumulo_kerberos_user_auth\",\n      \"description\": \"Accumulo Kerberos User Auth (AMBARI-10163)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"knox_versioned_data_dir\",\n      \"description\": \"Use versioned data dir for Knox (AMBARI-13164)\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"knox_sso_topology\",\n      \"description\": \"Knox SSO Topology support (AMBARI-13975)\",\n      \"min_version\": \"2.3.8.0\"\n    },\n    {\n      \"name\": \"atlas_rolling_upgrade\",\n      \"description\": \"Rolling upgrade support for Atlas\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"oozie_admin_user\",\n      \"description\": \"Oozie install user as an Oozie admin user (AMBARI-7976)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_create_hive_tez_configs\",\n      \"description\": \"Oozie create configs for Ambari Hive and Tez deployments (AMBARI-8074)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_setup_shared_lib\",\n      \"description\": \"Oozie setup tools used to shared Oozie lib to HDFS (AMBARI-7240)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_host_kerberos\",\n      \"description\": \"Oozie in secured clusters uses _HOST in Kerberos principal (AMBARI-9775)\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"falcon_extensions\",\n      \"description\": \"Falcon Extension\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_metastore_upgrade_schema\",\n      \"description\": \"Hive metastore upgrade schema support (AMBARI-11176)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server_interactive\",\n      \"description\": \"Hive server interactive support (AMBARI-15573)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_webhcat_specific_configs\",\n      \"description\": \"Hive webhcat specific configurations support (AMBARI-12364)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_purge_table\",\n      \"description\": \"Hive purge table support (AMBARI-12260)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server2_kerberized_env\",\n      \"description\": \"Hive server2 working on kerberized environment (AMBARI-13749)\",\n      \"min_version\": \"2.2.3.0\",\n      \"max_version\": \"2.2.5.0\"\n    },\n    {\n      \"name\": \"hive_env_heapsize\",\n      \"description\": \"Hive heapsize property defined in hive-env (AMBARI-12801)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_hsm_support\",\n      \"description\": \"Ranger KMS HSM support (AMBARI-15752)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_log4j_support\",\n      \"description\": \"Ranger supporting log-4j properties (AMBARI-15681)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kerberos_support\",\n      \"description\": \"Ranger Kerberos support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": 
\"hive_metastore_site_support\",\n      \"description\": \"Hive Metastore site support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_password_jceks\",\n      \"description\": \"Saving Ranger Usersync credentials in jceks\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_install_logsearch_client\",\n      \"description\": \"LogSearch Service support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hbase_home_directory\",\n      \"description\": \"Hbase home directory in HDFS needed for HBASE backup\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"spark_livy\",\n      \"description\": \"Livy as slave component of spark\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"atlas_ranger_plugin_support\",\n      \"description\": \"Atlas Ranger plugin support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_pid_support\",\n      \"description\": \"Ranger Service support pid generation AMBARI-16756\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_pid_support\",\n      \"description\": \"Ranger KMS Service support pid generation\",\n      \"min_version\": \"2.5.0.0\"\n    }\n  ]\n}", 
+            "stack_features": "{\n  \"stack_features\": [\n    {\n      \"name\": \"snappy\",\n      \"description\": \"Snappy compressor/decompressor support\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"lzo\",\n      \"description\": \"LZO libraries support\",\n      \"min_version\": \"2.2.1.0\"\n    },\n    {\n      \"name\": \"express_upgrade\",\n      \"description\": \"Express upgrade support\",\n      \"min_version\": \"2.1.0.0\"\n    },\n    {\n      \"name\": \"rolling_upgrade\",\n      \"description\": \"Rolling upgrade support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"config_versioning\",\n      \"description\": \"Configurable versions support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"datanode_non_root\",\n      \"description\": \"DataNode running as non-root support (AMBARI-7615)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"remove_ranger_hdfs_plugin_env\",\n      \"description\": \"HDFS removes Ranger env files (AMBARI-14299)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger\",\n      \"description\": \"Ranger Service support\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_tagsync_component\",\n      \"description\": \"Ranger Tagsync component support (AMBARI-14383)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"phoenix\",\n      \"description\": \"Phoenix Service support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"nfs\",\n      \"description\": \"NFS support\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"tez_for_spark\",\n      \"description\": \"Tez dependency for Spark\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"timeline_state_store\",\n      \"description\": \"Yarn application timeline-service supports state store property (AMBARI-11442)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"copy_tarball_to_hdfs\",\n      \"description\": \"Copy tarball to HDFS support (AMBARI-12113)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"spark_16plus\",\n      \"description\": \"Spark 1.6+\",\n      \"min_version\": \"2.4.0.0\"\n    },\n    {\n      \"name\": \"spark_thriftserver\",\n      \"description\": \"Spark Thrift Server\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"storm_kerberos\",\n      \"description\": \"Storm Kerberos support (AMBARI-7570)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"storm_ams\",\n      \"description\": \"Storm AMS integration (AMBARI-10710)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"create_kafka_broker_id\",\n      \"description\": \"Ambari should create Kafka Broker Id (AMBARI-12678)\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_listeners\",\n      \"description\": \"Kafka listeners (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"kafka_kerberos\",\n      \"description\": \"Kafka Kerberos support (AMBARI-10984)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"pig_on_tez\",\n      \"description\": \"Pig on Tez support (AMBARI-7863)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_non_root\",\n      
\"description\": \"Ranger Usersync as non-root user (AMBARI-10416)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"ranger_audit_db_support\",\n      \"description\": \"Ranger Audit to DB support\",\n      \"min_version\": \"2.2.0.0\",\n      \"max_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"accumulo_kerberos_user_auth\",\n      \"description\": \"Accumulo Kerberos User Auth (AMBARI-10163)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"knox_versioned_data_dir\",\n      \"description\": \"Use versioned data dir for Knox (AMBARI-13164)\",\n      \"min_version\": \"2.3.2.0\"\n    },\n    {\n      \"name\": \"knox_sso_topology\",\n      \"description\": \"Knox SSO Topology support (AMBARI-13975)\",\n      \"min_version\": \"2.3.8.0\"\n    },\n    {\n      \"name\": \"atlas_rolling_upgrade\",\n      \"description\": \"Rolling upgrade support for Atlas\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"oozie_admin_user\",\n      \"description\": \"Oozie install user as an Oozie admin user (AMBARI-7976)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_create_hive_tez_configs\",\n      \"description\": \"Oozie create configs for Ambari Hive and Tez deployments (AMBARI-8074)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_setup_shared_lib\",\n      \"description\": \"Oozie setup tools used to shared Oozie lib to HDFS (AMBARI-7240)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"oozie_host_kerberos\",\n      \"description\": \"Oozie in secured clusters uses _HOST in Kerberos principal (AMBARI-9775)\",\n      \"min_version\": \"2.0.0.0\",\n      \"max_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"falcon_extensions\",\n      \"description\": \"Falcon Extension\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_metastore_upgrade_schema\",\n      \"description\": \"Hive metastore upgrade schema support (AMBARI-11176)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server_interactive\",\n      \"description\": \"Hive server interactive support (AMBARI-15573)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hive_webhcat_specific_configs\",\n      \"description\": \"Hive webhcat specific configurations support (AMBARI-12364)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_purge_table\",\n      \"description\": \"Hive purge table support (AMBARI-12260)\",\n      \"min_version\": \"2.3.0.0\"\n    },\n    {\n      \"name\": \"hive_server2_kerberized_env\",\n      \"description\": \"Hive server2 working on kerberized environment (AMBARI-13749)\",\n      \"min_version\": \"2.2.3.0\",\n      \"max_version\": \"2.2.5.0\"\n    },\n    {\n      \"name\": \"hive_env_heapsize\",\n      \"description\": \"Hive heapsize property defined in hive-env (AMBARI-12801)\",\n      \"min_version\": \"2.2.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_hsm_support\",\n      \"description\": \"Ranger KMS HSM support (AMBARI-15752)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_log4j_support\",\n      \"description\": \"Ranger supporting log-4j properties (AMBARI-15681)\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kerberos_support\",\n      \"description\": \"Ranger Kerberos support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": 
\"hive_metastore_site_support\",\n      \"description\": \"Hive Metastore site support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_usersync_password_jceks\",\n      \"description\": \"Saving Ranger Usersync credentials in jceks\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_install_infra_client\",\n      \"description\": \"LogSearch Service support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"hbase_home_directory\",\n      \"description\": \"Hbase home directory in HDFS needed for HBASE backup\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"spark_livy\",\n      \"description\": \"Livy as slave component of spark\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"atlas_ranger_plugin_support\",\n      \"description\": \"Atlas Ranger plugin support\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_pid_support\",\n      \"description\": \"Ranger Service support pid generation AMBARI-16756\",\n      \"min_version\": \"2.5.0.0\"\n    },\n    {\n      \"name\": \"ranger_kms_pid_support\",\n      \"description\": \"Ranger KMS Service support pid generation\",\n      \"min_version\": \"2.5.0.0\"\n    }\n  ]\n}",
             "recovery_enabled": "true", 
             "smokeuser_principal_name": "ambari-qa-test_cluster01@EXAMPLE.COM", 
             "recovery_max_count": "6", 
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index 13b9cde..1fad1d6 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -2065,88 +2065,81 @@
     "index": 7
   },
   {
-    "name": "logsearch_solr_audit_logs_use_ranger",
+    "name": "logsearch_debug_enabled",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
     "index": 8
   },
   {
-    "name": "logsearch_debug_enabled",
+    "name": "logsearch_debug_port",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
     "index": 9
   },
   {
-    "name": "logsearch_debug_port",
+    "name": "logsearch_truststore_location",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
     "index": 10
   },
   {
-    "name": "logsearch_truststore_location",
+    "name": "logsearch_truststore_type",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
     "index": 11
   },
   {
-    "name": "logsearch_truststore_type",
+    "name": "logsearch_truststore_password",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
     "index": 12
   },
   {
-    "name": "logsearch_truststore_password",
+    "name": "logsearch_keystore_location",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
     "index": 13
   },
   {
-    "name": "logsearch_keystore_location",
+    "name": "logsearch_keystore_type",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
     "index": 14
   },
   {
-    "name": "logsearch_keystore_type",
+    "name": "logsearch_keystore_password",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
     "index": 15
   },
   {
-    "name": "logsearch_keystore_password",
+    "name": "logsearch_kerberos_keytab",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
     "index": 16
   },
   {
-    "name": "logsearch_kerberos_keytab",
+    "name": "logsearch_kerberos_principal",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
     "index": 17
   },
   {
-    "name": "logsearch_kerberos_principal",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-env.xml",
-    "category": "Advanced logsearch-env",
-    "index": 18
-  },
-  {
     "name": "content",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-env.xml",
     "category": "Advanced logsearch-env",
-    "index": 19
+    "index": 18
   },
   /*logsearch-log4j*/
   {
@@ -2242,147 +2235,155 @@
     "category": "Advanced logsearch-properties",
     "index": 9
   },
-  /*logsearch-solr-env*/
+  /*infra-solr-client-log4j*/
   {
-    "name": "logsearch_solr_log_dir",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_client_log_dir",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-client-log4j.xml",
+    "category": "Advanced infra-solr-client-log4j",
     "index": 1
   },
   {
-    "name": "logsearch_solr_client_log_dir",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "content",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-client-log4j.xml",
+    "category": "Advanced infra-solr-client-log4j",
+    "index": 2
+  },
+  /*infra-solr-env*/
+  {
+    "name": "infra_solr_log_dir",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
+    "index": 1
+  },
+  {
+    "name": "infra_solr_pid_dir",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 2
   },
   {
-    "name": "logsearch_solr_pid_dir",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_port",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 3
   },
   {
-    "name": "logsearch_solr_port",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_jmx_port",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 4
   },
   {
-    "name": "logsearch_solr_jmx_port",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_ssl_enabled",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 5
   },
   {
-    "name": "logsearch_solr_ssl_enabled",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_truststore_location",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 6
   },
   {
-    "name": "logsearch_solr_truststore_location",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_truststore_type",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 7
   },
   {
-    "name": "logsearch_solr_truststore_type",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_truststore_password",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 8
   },
   {
-    "name": "logsearch_solr_truststore_password",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_keystore_location",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 9
   },
   {
-    "name": "logsearch_solr_keystore_location",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_keystore_type",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 10
   },
   {
-    "name": "logsearch_solr_keystore_type",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_keystore_password",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 11
   },
   {
-    "name": "logsearch_solr_keystore_password",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_kerberos_keytab",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 12
   },
   {
-    "name": "logsearch_solr_kerberos_keytab",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
-    "index": 12
-  },
-  {
-    "name": "logsearch_solr_kerberos_principal",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_kerberos_principal",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 13
   },
   {
-    "name": "logsearch_solr_web_kerberos_keytab",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_web_kerberos_keytab",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 14
   },
   {
-    "name": "logsearch_solr_web_kerberos_principal",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_web_kerberos_principal",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 15
   },
   {
-    "name": "logsearch_solr_kerberos_name_rules",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "name": "infra_solr_kerberos_name_rules",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 16
   },
   {
     "name": "content",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-env.xml",
-    "category": "Advanced logsearch-solr-env",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-env.xml",
+    "category": "Advanced infra-solr-env",
     "index": 17
   },
-  /*logsearch-solr-log4j*/
+  /*infra-solr-log4j*/
   {
     "name": "content",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-log4j.xml",
-    "category": "Advanced logsearch-solr-log4j",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-log4j.xml",
+    "category": "Advanced infra-solr-log4j",
     "index": 1
   },
-  /*logsearch-solr-xml*/
+  /*infra-solr-xml*/
   {
     "name": "content",
-    "serviceName": "LOGSEARCH",
-    "filename": "logsearch-solr-xml.xml",
-    "category": "Advanced logsearch-solr-xml",
+    "serviceName": "AMBARI_INFRA",
+    "filename": "infra-solr-xml.xml",
+    "category": "Advanced infra-solr-xml",
     "index": 1
   },