Merge branch 'develop' into feature/SLIDER-1107_AM_config_generation
Conflicts:
slider-core/src/main/java/org/apache/slider/core/persist/AppDefinitionPersister.java
diff --git a/app-packages/hbase-nopkg/README.md b/app-packages/hbase-nopkg/README.md
new file mode 100644
index 0000000..43fd7d4
--- /dev/null
+++ b/app-packages/hbase-nopkg/README.md
@@ -0,0 +1,56 @@
+<!---
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+# Create Slider App Package for HBase
+
+appConfig-default.json and resources-default.json are not required to be packaged.
+These files are included as reference configuration for Slider apps and are suitable
+for a one-node cluster.
+
+To create the app package you will need the HBase tarball and invoke mvn command
+with appropriate parameters. It is recommended that pkg.version be set to the
+ same value as hbase.version.
+
+Command:
+
+ mvn clean package -Phbase-resources -Dhbase.version=<hbase version> -Dpkg.version=<app package version>
+ -Dpkg.name=<file name of app tarball> -Dpkg.src=<folder location where the pkg is available>
+
+Example:
+
+ mvn clean package -Phbase-resources -Dhbase.version=1.1.4
+ -Dpkg.version=1.1.4 -Dpkg.name=hbase-1.1.4-bin.tar.gz
+ -Dpkg.src=/Users/user1/Downloads
+
+App package can be found in
+
+ app-packages/hbase-nopkg/target/slider-hbase-resources-1.1.4.zip
+
+## Verifying the content
+
+Verify the content using
+
+ zip -Tv slider-hbase-*.zip
+
+## Sample commands
+
+ unzip slider-hbase-resources-1.1.4.zip
+ slider resource --install --resource resources --destdir hbase
+ slider create hbase --template appConfig-default.json --resources resources-default.json --metainfo metainfo.xml
+ slider client --install --dest client_install_dir --name hbase --config clientInstallConfig-default.json
+
+
diff --git a/app-packages/hbase-nopkg/appConfig-default.json b/app-packages/hbase-nopkg/appConfig-default.json
new file mode 100644
index 0000000..1095efd
--- /dev/null
+++ b/app-packages/hbase-nopkg/appConfig-default.json
@@ -0,0 +1,112 @@
+{
+ "schema": "http://example.org/specification/v2.0.0",
+ "metadata": {
+ },
+ "global": {
+ "am.config.generation": "true",
+ "create.default.zookeeper.node": "true",
+ "java_home": "/usr/jdk64/jdk1.8.0_60",
+ "system_configs": "core-site",
+
+ "site.global.app_user": "yarn",
+ "site.global.app_version": "${hbase.version}",
+ "site.global.app_root": "${AGENT_WORK_ROOT}/app/packages/hbase-${hbase.version}",
+
+ "site.global.pid_file": "${@//site/global/app_pid_dir}/hbase-${@//site/global/app_user}-${@//site/global/hbase_component}.pid",
+ "site.global.daemon_cmd": "env HBASE_IDENT_STRING=${@//site/global/app_user} ${@//site/global/app_root}/bin/hbase-daemon.sh --config ${@//site/global/app_conf_dir}",
+
+ "site.global.hbase_instance_name": "instancename",
+ "site.global.hbase_root_password": "secret",
+ "site.global.user_group": "hadoop",
+
+ "site.global.thrift_port": "0",
+ "site.global.thrift2_port": "0",
+ "site.global.rest_port": "0",
+
+ "site.hbase-env.template.file": "hbase-env.sh.j2",
+ "site.hbase-env.java64_home": "${JAVA_HOME}",
+ "site.hbase-env.conf_dir": "${@//site/global/app_conf_dir}",
+ "site.hbase-env.log_dir": "${@//site/global/app_log_dir}",
+ "site.hbase-env.pid_dir": "${@//site/global/app_pid_dir}",
+ "site.hbase-env.master_heapsize": "1024m",
+ "site.hbase-env.regionserver_heapsize": "1024m",
+ "site.hbase-env.regionserver_xmn_size": "512m",
+ "site.hbase-env.hbase_opts": "",
+
+ "site.hbase-site.hbase.rootdir": "${DEFAULT_DATA_DIR}/data",
+ "site.hbase-site.hbase.cluster.distributed": "true",
+ "site.hbase-site.hbase.superuser": "${USER_NAME}",
+ "site.hbase-site.hbase.tmp.dir": "work/app/tmp",
+ "site.hbase-site.hbase.local.dir": "${hbase.tmp.dir}/local",
+ "site.hbase-site.hbase.zookeeper.quorum": "${ZK_HOST}",
+ "site.hbase-site.zookeeper.znode.parent": "${DEFAULT_ZK_PATH}",
+ "site.hbase-site.hbase.regionserver.info.port": "0",
+ "site.hbase-site.hbase.bulkload.staging.dir": "/user/${USER_NAME}/hbase-staging",
+ "site.hbase-site.hbase.coprocessor.region.classes": "org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint",
+ "site.hbase-site.hbase.master.info.bindAddress": "0.0.0.0",
+ "site.hbase-site.hbase.master.info.port": "16010",
+ "site.hbase-site.hbase.regionserver.port": "0",
+ "site.hbase-site.hbase.master.port": "0",
+ "site.hbase-site.hbase.regionserver.global.memstore.upperLimit": "0.4",
+ "site.hbase-site.hbase.regionserver.handler.count": "60",
+ "site.hbase-site.hbase.hregion.majorcompaction": "86400000",
+ "site.hbase-site.hbase.regionserver.global.memstore.lowerLimit": "0.38",
+ "site.hbase-site.hbase.hregion.memstore.block.multiplier": "2",
+ "site.hbase-site.hbase.hregion.memstore.flush.size": "134217728",
+ "site.hbase-site.hbase.hregion.memstore.mslab.enabled": "true",
+ "site.hbase-site.hbase.hregion.max.filesize": "10737418240",
+ "site.hbase-site.hbase.client.scanner.caching": "100",
+ "site.hbase-site.zookeeper.session.timeout": "30000",
+ "site.hbase-site.hbase.client.keyvalue.maxsize": "10485760",
+ "site.hbase-site.hbase.hstore.compactionThreshold": "3",
+ "site.hbase-site.hbase.hstore.flush.retries.number": "120",
+ "site.hbase-site.hbase.hstore.blockingStoreFiles": "10",
+ "site.hbase-site.hfile.block.cache.size": "0.40",
+ "site.hbase-site.hbase.zookeeper.property.clientPort": "2181",
+ "site.hbase-site.hbase.zookeeper.useMulti": "true",
+ "site.hbase-site.hbase.defaults.for.version.skip": "true",
+ "site.hbase-site.dfs.domain.socket.path": "/var/lib/hadoop-hdfs/dn_socket",
+
+ "site.hbase-site.hbase.security.authentication": "simple",
+ "site.hbase-site.hbase.security.authorization": "false",
+ "site.hbase-site.hbase.coprocessor.region.classes": "",
+ "site.hbase-site.hbase.coprocessor.master.classes": "",
+
+ "site.hbase-policy.security.client.protocol.acl": "*",
+ "site.hbase-policy.security.admin.protocol.acl": "*",
+ "site.hbase-policy.security.masterregion.protocol.acl": "*",
+
+ "site.hbase-log4j.template.file": "hbase-log4j.properties.j2",
+
+ "site.hbase-metrics2.template.file": "hadoop-metrics2-hbase.properties-RS.j2",
+ "site.hbase-metrics2.app_name": "${CLUSTER_NAME}",
+ "site.hbase-metrics2.metric_collector_host": "${NN_HOST}",
+ "site.hbase-metrics2.metric_collector_port": "6188",
+ "site.hbase-metrics2.metric_collector_lib": ""
+ },
+ "components": {
+ "slider-appmaster": {
+ "jvm.heapsize": "1024M"
+ },
+ "HBASE_MASTER": {
+ "site.global.hbase_component": "master",
+ "site.hbase-metrics2.template.file": "hadoop-metrics2-hbase.properties-MASTER.j2"
+ },
+ "HBASE_REGIONSERVER": {
+ "site.global.hbase_component": "regionserver"
+ },
+ "HBASE_REST": {
+ "site.global.hbase_component": "rest"
+ },
+ "HBASE_THRIFT": {
+ "site.global.hbase_component": "thrift"
+ },
+ "HBASE_THRIFT2": {
+ "site.global.hbase_component": "thrift2"
+ },
+ "HBASE_CLIENT": {
+ "site.hbase-env.template.file": "hbase-env-client.sh.j2",
+ "site.hbase-site.instance": "${CLUSTER_NAME}"
+ }
+ }
+}
diff --git a/app-packages/hbase-nopkg/appConfig-secured-default.json b/app-packages/hbase-nopkg/appConfig-secured-default.json
new file mode 100644
index 0000000..ac6e45b
--- /dev/null
+++ b/app-packages/hbase-nopkg/appConfig-secured-default.json
@@ -0,0 +1,128 @@
+{
+ "schema": "http://example.org/specification/v2.0.0",
+ "metadata": {
+ },
+ "global": {
+ "am.config.generation": "true",
+ "create.default.zookeeper.node": "true",
+ "java_home": "/usr/jdk64/jdk1.8.0_60",
+ "system_configs": "core-site,hdfs-site",
+
+ "site.global.app_user": "${USER_NAME}",
+ "site.global.app_version": "${hbase.version}",
+ "site.global.app_root": "${AGENT_WORK_ROOT}/app/packages/hbase-${hbase.version}",
+
+ "site.global.pid_file": "${@//site/global/app_pid_dir}/hbase-${@//site/global/app_user}-${@//site/global/hbase_component}.pid",
+ "site.global.daemon_cmd": "env HBASE_IDENT_STRING=${@//site/global/app_user} ${@//site/global/app_root}/bin/hbase-daemon.sh --config ${@//site/global/app_conf_dir}",
+
+ "site.global.hbase_instance_name": "instancename",
+ "site.global.hbase_root_password": "secret",
+ "site.global.user_group": "hadoop",
+
+ "site.global.thrift_port": "0",
+ "site.global.thrift2_port": "0",
+ "site.global.rest_port": "0",
+
+ "site.hbase-env.template.file": "hbase-env-secured.sh.j2",
+ "site.hbase-env.java64_home": "${JAVA_HOME}",
+ "site.hbase-env.conf_dir": "${@//site/global/app_conf_dir}",
+ "site.hbase-env.log_dir": "${@//site/global/app_log_dir}",
+ "site.hbase-env.pid_dir": "${@//site/global/app_pid_dir}",
+ "site.hbase-env.master_heapsize": "1024m",
+ "site.hbase-env.regionserver_heapsize": "1024m",
+ "site.hbase-env.regionserver_xmn_size": "512m",
+ "site.hbase-env.hbase_opts": "",
+ "site.hbase-env.client_jaas_config_file": "${@//site/hbase-env/conf_dir}/hbase_client_jaas.conf",
+ "site.hbase-env.master_jaas_config_file": "${@//site/hbase-env/conf_dir}/hbase_master_jaas.conf",
+ "site.hbase-env.regionserver_jaas_config_file": "${@//site/hbase-env/conf_dir}/hbase_regionserver_jaas.conf",
+
+ "site.hbase-site.hbase.rootdir": "${DEFAULT_DATA_DIR}",
+ "site.hbase-site.hbase.cluster.distributed": "true",
+ "site.hbase-site.hbase.superuser": "${USER_NAME}",
+ "site.hbase-site.hbase.tmp.dir": "${AGENT_WORK_ROOT}/work/app/tmp",
+ "site.hbase-site.hbase.local.dir": "${hbase.tmp.dir}/local",
+ "site.hbase-site.hbase.zookeeper.quorum": "${ZK_HOST}",
+ "site.hbase-site.zookeeper.znode.parent": "${DEFAULT_ZK_PATH}",
+ "site.hbase-site.hbase.regionserver.info.port": "0",
+ "site.hbase-site.hbase.master.info.port": "0",
+ "site.hbase-site.hbase.regionserver.port": "0",
+ "site.hbase-site.hbase.master.port": "0",
+ "site.hbase-site.hbase.regionserver.global.memstore.upperLimit": "0.4",
+ "site.hbase-site.hbase.regionserver.handler.count": "60",
+ "site.hbase-site.hbase.hregion.majorcompaction": "86400000",
+ "site.hbase-site.hbase.regionserver.global.memstore.lowerLimit": "0.38",
+ "site.hbase-site.hbase.hregion.memstore.block.multiplier": "2",
+ "site.hbase-site.hbase.hregion.memstore.flush.size": "134217728",
+ "site.hbase-site.hbase.hregion.memstore.mslab.enabled": "true",
+ "site.hbase-site.hbase.hregion.max.filesize": "10737418240",
+ "site.hbase-site.hbase.client.scanner.caching": "100",
+ "site.hbase-site.zookeeper.session.timeout": "30000",
+ "site.hbase-site.hbase.client.keyvalue.maxsize": "10485760",
+ "site.hbase-site.hbase.hstore.compactionThreshold": "3",
+ "site.hbase-site.hbase.hstore.flush.retries.number": "120",
+ "site.hbase-site.hbase.hstore.blockingStoreFiles": "10",
+ "site.hbase-site.hfile.block.cache.size": "0.40",
+ "site.hbase-site.hbase.zookeeper.property.clientPort": "2181",
+ "site.hbase-site.hbase.zookeeper.useMulti": "true",
+ "site.hbase-site.hbase.defaults.for.version.skip": "true",
+ "site.hbase-site.dfs.domain.socket.path": "/var/lib/hadoop-hdfs/dn_socket",
+
+ "site.hbase-site.hbase.security.authentication": "kerberos",
+ "site.core-site.hadoop.security.authentication": "kerberos",
+ "site.hbase-site.hbase.security.authorization": "true",
+ "site.hbase-site.hbase.security.access.early_out": "true",
+ "site.hbase-site.hbase.coprocessor.master.classes": "org.apache.hadoop.hbase.security.access.AccessController",
+ "site.hbase-site.hbase.coprocessor.region.classes": "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.AccessController,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint",
+ "site.hbase-site.hbase.regionserver.kerberos.principal": "${USER_NAME}/${THIS_HOST}@EXAMPLE.COM",
+ "site.hbase-site.hbase.regionserver.keytab.file": "${AGENT_WORK_ROOT}/keytabs/${USER_NAME}.HBASE.service.keytab",
+ "site.hbase-site.hbase.master.kerberos.principal": "${USER_NAME}/${THIS_HOST}@EXAMPLE.COM",
+ "site.hbase-site.hbase.master.keytab.file": "${AGENT_WORK_ROOT}/keytabs/${USER_NAME}.HBASE.service.keytab",
+ "site.hbase-site.hbase.rest.kerberos.principal": "${USER_NAME}/${THIS_HOST}@EXAMPLE.COM",
+ "site.hbase-site.hbase.rest.keytab.file": "${AGENT_WORK_ROOT}/keytabs/${USER_NAME}.HBASE.service.keytab",
+ "site.hbase-site.hbase.thrift.kerberos.principal": "${USER_NAME}/${THIS_HOST}@EXAMPLE.COM",
+ "site.hbase-site.hbase.thrift.keytab.file": "${AGENT_WORK_ROOT}/keytabs/${USER_NAME}.HBASE.service.keytab",
+
+ "site.hbase-metrics2.template.file": "hadoop-metrics2-hbase.properties-RS.j2",
+ "site.hbase-metrics2.app_name": "${CLUSTER_NAME}",
+ "site.hbase-metrics2.metric_collector_host": "${NN_HOST}",
+ "site.hbase-metrics2.metric_collector_port": "6188",
+ "site.hbase-metrics2.metric_collector_lib": "",
+
+ "site.hbase-jaas.master_keytab_path": "${@//site/hbase-site/hbase.master.keytab.file}",
+ "site.hbase-jaas.master_jaas_princ": "${@//site/hbase-site/hbase.master.kerberos.principal}",
+ "site.hbase-jaas.regionserver_keytab_path": "${@//site/hbase-site/hbase.regionserver.keytab.file}",
+ "site.hbase-jaas.regionserver_jaas_princ": "${@//site/hbase-site/hbase.regionserver.kerberos.principal}"
+ },
+ "components": {
+ "slider-appmaster": {
+ "jvm.heapsize": "1024M",
+ "slider.hdfs.keytab.dir": ".slider/keytabs/hbase",
+ "slider.am.login.keytab.name": "${USER_NAME}.headless.keytab",
+ "slider.keytab.principal.name": "${USER_NAME}"
+ },
+ "HBASE_MASTER": {
+ "site.global.hbase_component": "master",
+ "site.hbase-metrics2.template.file": "hadoop-metrics2-hbase.properties-MASTER.j2",
+ "site.hbase-jaas.template.file": "hbase_master_jaas.conf.j2"
+ },
+ "HBASE_REGIONSERVER": {
+ "site.global.hbase_component": "regionserver",
+ "site.hbase-jaas.template.file": "hbase_regionserver_jaas.conf.j2"
+ },
+ "HBASE_REST": {
+ "site.global.hbase_component": "rest",
+ "site.hbase-jaas.template.file": "hbase_regionserver_jaas.conf.j2"
+ },
+ "HBASE_THRIFT": {
+ "site.global.hbase_component": "thrift",
+ "site.hbase-jaas.template.file": "hbase_regionserver_jaas.conf.j2"
+ },
+ "HBASE_THRIFT2": {
+ "site.global.hbase_component": "thrift2",
+ "site.hbase-jaas.template.file": "hbase_regionserver_jaas.conf.j2"
+ },
+ "HBASE_CLIENT": {
+ "site.hbase-jaas.template.file": "hbase_client_jaas.conf.j2"
+ }
+ }
+}
diff --git a/app-packages/hbase-nopkg/clientInstallConfig-default.json b/app-packages/hbase-nopkg/clientInstallConfig-default.json
new file mode 100644
index 0000000..f195e7f
--- /dev/null
+++ b/app-packages/hbase-nopkg/clientInstallConfig-default.json
@@ -0,0 +1,6 @@
+{
+ "schema":"http://example.org/specification/v2.0.0",
+ "global":{
+ "client_root": "hbase-${hbase.version}"
+ }
+}
diff --git a/app-packages/hbase-nopkg/metainfo-secured.xml b/app-packages/hbase-nopkg/metainfo-secured.xml
new file mode 100644
index 0000000..bc5759e
--- /dev/null
+++ b/app-packages/hbase-nopkg/metainfo-secured.xml
@@ -0,0 +1,255 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <application>
+ <name>HBASE</name>
+ <comment>
+ Apache HBase is the Hadoop database, a distributed, scalable, big data store.
+ Requirements:
+ 1. Ensure parent dir for path (hbase-site/hbase.rootdir) is accessible to the App owner.
+ 2. Ensure ZK root (hbase-site/zookeeper.znode.parent) is unique for the App instance.
+ </comment>
+ <version>${pkg.version}</version>
+ <type>YARN-APP</type>
+ <minHadoopVersion>2.1.0</minHadoopVersion>
+ <exportedConfigs>hbase-site</exportedConfigs>
+ <exportGroups>
+ <exportGroup>
+ <name>QuickLinks</name>
+ <exports>
+ <export>
+ <name>org.apache.slider.jmx</name>
+ <value>http://${HBASE_MASTER_HOST}:${site.hbase-site.hbase.master.info.port}/jmx</value>
+ </export>
+ <export>
+ <name>org.apache.slider.monitor</name>
+ <value>http://${HBASE_MASTER_HOST}:${site.hbase-site.hbase.master.info.port}/master-status</value>
+ </export>
+ <export>
+ <name>org.apache.slider.hbase.rest</name>
+ <value>http://${HBASE_REST_HOST}:${site.global.rest_port}</value>
+ </export>
+ <export>
+ <name>org.apache.slider.hbase.thrift2</name>
+ <value>http://${HBASE_THRIFT2_HOST}:${site.global.thrift2_port}</value>
+ </export>
+ <export>
+ <name>org.apache.slider.hbase.thrift</name>
+ <value>http://${HBASE_THRIFT_HOST}:${site.global.thrift_port}</value>
+ </export>
+ <export>
+ <name>org.apache.slider.metrics</name>
+ <value>http://${site.hbase-metrics2.metric_collector_host}:${site.hbase-metrics2.metric_collector_port}/ws/v1/timeline/metrics</value>
+ </export>
+ <export>
+ <name>org.apache.slider.hbase.zk-path</name>
+ <value>${site.hbase-site.hbase.zookeeper.quorum}:${site.hbase-site.zookeeper.znode.parent}</value>
+ </export>
+ </exports>
+ </exportGroup>
+ </exportGroups>
+ <commandOrders>
+ <commandOrder>
+ <command>HBASE_REGIONSERVER-START</command>
+ <requires>HBASE_MASTER-STARTED</requires>
+ </commandOrder>
+ <commandOrder>
+ <command>HBASE_MASTER-START</command>
+ <requires>HBASE_REST-INSTALLED</requires>
+ <requires>HBASE_THRIFT-INSTALLED</requires>
+ <requires>HBASE_THRIFT2-INSTALLED</requires>
+ </commandOrder>
+ </commandOrders>
+ <components>
+ <component>
+ <name>HBASE_MASTER</name>
+ <category>MASTER</category>
+ <minInstanceCount>1</minInstanceCount>
+ <appExports>QuickLinks-org.apache.slider.jmx,QuickLinks-org.apache.slider.monitor,QuickLinks-org.apache.slider.metrics,QuickLinks-org.apache.slider.hbase.zk-path</appExports>
+ <componentExports>
+ <componentExport>
+ <name>org.apache.slider.jmx</name>
+ <value>${THIS_HOST}:${site.hbase-site.hbase.master.info.port}/jmx</value>
+ </componentExport>
+ <componentExport>
+ <name>org.apache.slider.monitor</name>
+ <value>${THIS_HOST}:${site.hbase-site.hbase.master.info.port}/master-status</value>
+ </componentExport>
+ <componentExport>
+ <name>org.apache.slider.hbase.zk-path</name>
+ <value>${site.hbase-site.hbase.zookeeper.quorum}:${site.hbase-site.zookeeper.znode.parent}</value>
+ </componentExport>
+ </componentExports>
+ <commands>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} start master</exec>
+ </command>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} stop master</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ <configFile>
+ <type>template</type>
+ <fileName>app/conf/hbase_master_jaas.conf</fileName>
+ <dictionaryName>hbase-jaas</dictionaryName>
+ </configFile>
+ </component>
+
+ <component>
+ <name>HBASE_REGIONSERVER</name>
+ <category>SLAVE</category>
+ <minInstanceCount>1</minInstanceCount>
+ <commands>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} start regionserver</exec>
+ </command>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} stop regionserver</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ <configFile>
+ <type>template</type>
+ <fileName>app/conf/hbase_regionserver_jaas.conf</fileName>
+ <dictionaryName>hbase-jaas</dictionaryName>
+ </configFile>
+ </component>
+
+ <component>
+ <name>HBASE_REST</name>
+ <category>MASTER</category>
+ <appExports>QuickLinks-org.apache.slider.hbase.rest</appExports>
+ <commands>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} start rest -p {$conf:@//site/global/rest_port}</exec>
+ </command>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} stop rest</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ <configFile>
+ <type>template</type>
+ <fileName>app/conf/hbase_rest_jaas.conf</fileName>
+ <dictionaryName>hbase-jaas</dictionaryName>
+ </configFile>
+ </component>
+
+ <component>
+ <name>HBASE_THRIFT</name>
+ <category>MASTER</category>
+ <appExports>QuickLinks-org.apache.slider.hbase.thrift</appExports>
+ <commands>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} start thrift -p {$conf:@//site/global/thrift_port}</exec>
+ </command>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} stop thrift</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ <configFile>
+ <type>template</type>
+ <fileName>app/conf/hbase_thrift2_jaas.conf</fileName>
+ <dictionaryName>hbase-jaas</dictionaryName>
+ </configFile>
+ </component>
+
+ <component>
+ <name>HBASE_THRIFT2</name>
+ <category>MASTER</category>
+ <minInstanceCount>0</minInstanceCount>
+ <appExports>QuickLinks-org.apache.slider.hbase.thrift2</appExports>
+ <commands>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} start thrift2 -p {$conf:@//site/global/thrift_port}</exec>
+ </command>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} stop thrift2</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ <configFile>
+ <type>template</type>
+ <fileName>app/conf/hbase_thrift_jaas.conf</fileName>
+ <dictionaryName>hbase-jaas</dictionaryName>
+ </configFile>
+ </component>
+
+ <component>
+ <name>HBASE_CLIENT</name>
+ <category>CLIENT</category>
+ <configFile>
+ <type>template</type>
+ <fileName>app/conf/hbase_client_jaas.conf</fileName>
+ <dictionaryName>hbase-jaas</dictionaryName>
+ </configFile>
+ </component>
+ </components>
+
+ <osSpecifics>
+ <osSpecific>
+ <osType>any</osType>
+ <packages>
+ <package>
+ <type>tarball</type>
+ <name>files/hbase-${pkg.version}.tar.gz</name>
+ </package>
+ </packages>
+ </osSpecific>
+ </osSpecifics>
+
+ <packages>
+ <package>
+ <type>tarball</type>
+ <name>hbase-${pkg.version}-bin.tar.gz</name>
+ </package>
+ </packages>
+
+ <configFiles>
+ <configFile>
+ <type>xml</type>
+ <fileName>app/conf/hbase-site.xml</fileName>
+ <dictionaryName>hbase-site</dictionaryName>
+ </configFile>
+ <configFile>
+ <type>template</type>
+ <fileName>app/conf/hbase-env.sh</fileName>
+ <dictionaryName>hbase-env</dictionaryName>
+ </configFile>
+ <configFile>
+ <type>template</type>
+ <fileName>app/conf/log4j.properties</fileName>
+ <dictionaryName>hbase-log4j</dictionaryName>
+ </configFile>
+ <configFile>
+ <type>xml</type>
+ <fileName>app/conf/hbase-policy.xml</fileName>
+ <dictionaryName>hbase-policy</dictionaryName>
+ </configFile>
+ <configFile>
+ <type>template</type>
+ <fileName>app/conf/hadoop-metrics2-hbase.properties</fileName>
+ <dictionaryName>hbase-metrics2</dictionaryName>
+ </configFile>
+ </configFiles>
+
+ </application>
+</metainfo>
diff --git a/app-packages/hbase-nopkg/metainfo.xml b/app-packages/hbase-nopkg/metainfo.xml
new file mode 100644
index 0000000..60ef10b
--- /dev/null
+++ b/app-packages/hbase-nopkg/metainfo.xml
@@ -0,0 +1,225 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <application>
+ <name>HBASE</name>
+ <comment>
+ Apache HBase is the Hadoop database, a distributed, scalable, big data store.
+ Requirements:
+ 1. Ensure parent dir for path (hbase-site/hbase.rootdir) is accessible to the App owner.
+ 2. Ensure ZK root (hbase-site/zookeeper.znode.parent) is unique for the App instance.
+ </comment>
+ <version>${pkg.version}</version>
+ <type>YARN-APP</type>
+ <minHadoopVersion>2.1.0</minHadoopVersion>
+ <exportedConfigs>hbase-site</exportedConfigs>
+ <exportGroups>
+ <exportGroup>
+ <name>QuickLinks</name>
+ <exports>
+ <export>
+ <name>org.apache.slider.jmx</name>
+ <value>http://${HBASE_MASTER_HOST}:${site.hbase-site.hbase.master.info.port}/jmx</value>
+ </export>
+ <export>
+ <name>org.apache.slider.monitor</name>
+ <value>http://${HBASE_MASTER_HOST}:${site.hbase-site.hbase.master.info.port}/master-status</value>
+ </export>
+ <export>
+ <name>org.apache.slider.hbase.rest</name>
+ <value>http://${HBASE_REST_HOST}:${site.global.rest_port}</value>
+ </export>
+ <export>
+ <name>org.apache.slider.hbase.thrift2</name>
+ <value>http://${HBASE_THRIFT2_HOST}:${site.global.thrift2_port}</value>
+ </export>
+ <export>
+ <name>org.apache.slider.hbase.thrift</name>
+ <value>http://${HBASE_THRIFT_HOST}:${site.global.thrift_port}</value>
+ </export>
+ <export>
+ <name>org.apache.slider.metrics</name>
+ <value>http://${site.hbase-metrics2.metric_collector_host}:${site.hbase-metrics2.metric_collector_port}/ws/v1/timeline/metrics</value>
+ </export>
+ <export>
+ <name>org.apache.slider.hbase.zk-path</name>
+ <value>${site.hbase-site.hbase.zookeeper.quorum}:${site.hbase-site.zookeeper.znode.parent}</value>
+ </export>
+ </exports>
+ </exportGroup>
+ </exportGroups>
+ <commandOrders>
+ <commandOrder>
+ <command>HBASE_REGIONSERVER-START</command>
+ <requires>HBASE_MASTER-STARTED</requires>
+ </commandOrder>
+ <commandOrder>
+ <command>HBASE_MASTER-START</command>
+ <requires>HBASE_REST-INSTALLED</requires>
+ <requires>HBASE_THRIFT-INSTALLED</requires>
+ <requires>HBASE_THRIFT2-INSTALLED</requires>
+ </commandOrder>
+ </commandOrders>
+ <components>
+ <component>
+ <name>HBASE_MASTER</name>
+ <category>MASTER</category>
+ <minInstanceCount>1</minInstanceCount>
+ <appExports>QuickLinks-org.apache.slider.jmx,QuickLinks-org.apache.slider.monitor,QuickLinks-org.apache.slider.metrics,QuickLinks-org.apache.slider.hbase.zk-path</appExports>
+ <componentExports>
+ <componentExport>
+ <name>org.apache.slider.jmx</name>
+ <value>${THIS_HOST}:${site.hbase-site.hbase.master.info.port}/jmx</value>
+ </componentExport>
+ <componentExport>
+ <name>org.apache.slider.monitor</name>
+ <value>${THIS_HOST}:${site.hbase-site.hbase.master.info.port}/master-status</value>
+ </componentExport>
+ <componentExport>
+ <name>org.apache.slider.hbase.zk-path</name>
+ <value>${site.hbase-site.hbase.zookeeper.quorum}:${site.hbase-site.zookeeper.znode.parent}</value>
+ </componentExport>
+ </componentExports>
+ <commands>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} start master</exec>
+ </command>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} stop master</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ </component>
+
+ <component>
+ <name>HBASE_REGIONSERVER</name>
+ <category>SLAVE</category>
+ <minInstanceCount>1</minInstanceCount>
+ <commands>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} start regionserver</exec>
+ </command>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} stop regionserver</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ </component>
+
+ <component>
+ <name>HBASE_REST</name>
+ <category>MASTER</category>
+ <appExports>QuickLinks-org.apache.slider.hbase.rest</appExports>
+ <commands>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} start rest -p {$conf:@//site/global/rest_port}</exec>
+ </command>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} stop rest</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ </component>
+
+ <component>
+ <name>HBASE_THRIFT</name>
+ <category>MASTER</category>
+ <appExports>QuickLinks-org.apache.slider.hbase.thrift</appExports>
+ <commands>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} start thrift -p {$conf:@//site/global/thrift_port}</exec>
+ </command>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} stop thrift</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ </component>
+
+ <component>
+ <name>HBASE_THRIFT2</name>
+ <category>MASTER</category>
+ <minInstanceCount>0</minInstanceCount>
+ <appExports>QuickLinks-org.apache.slider.hbase.thrift2</appExports>
+ <commands>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} start thrift2 -p {$conf:@//site/global/thrift_port}</exec>
+ </command>
+ <command>
+ <exec>{$conf:@//site/global/daemon_cmd} stop thrift2</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ </component>
+
+ <component>
+ <name>HBASE_CLIENT</name>
+ <category>CLIENT</category>
+ </component>
+ </components>
+
+ <osSpecifics>
+ <osSpecific>
+ <osType>any</osType>
+ <packages>
+ <package>
+ <type>tarball</type>
+ <name>files/hbase-${pkg.version}.tar.gz</name>
+ </package>
+ </packages>
+ </osSpecific>
+ </osSpecifics>
+
+ <packages>
+ <package>
+ <type>archive</type>
+ <name>hbase-${pkg.version}-bin.tar.gz</name>
+ </package>
+ </packages>
+
+ <configFiles>
+ <configFile>
+ <type>xml</type>
+ <fileName>hbase-site.xml</fileName>
+ <dictionaryName>hbase-site</dictionaryName>
+ </configFile>
+ <configFile>
+ <type>template</type>
+ <fileName>hbase-env.sh</fileName>
+ <dictionaryName>hbase-env</dictionaryName>
+ </configFile>
+ <configFile>
+ <type>template</type>
+ <fileName>log4j.properties</fileName>
+ <dictionaryName>hbase-log4j</dictionaryName>
+ </configFile>
+ <configFile>
+ <type>xml</type>
+ <fileName>hbase-policy.xml</fileName>
+ <dictionaryName>hbase-policy</dictionaryName>
+ </configFile>
+ <configFile>
+ <type>template</type>
+ <fileName>hadoop-metrics2-hbase.properties</fileName>
+ <dictionaryName>hbase-metrics2</dictionaryName>
+ </configFile>
+ </configFiles>
+
+ </application>
+</metainfo>
diff --git a/app-packages/hbase-nopkg/pom.xml b/app-packages/hbase-nopkg/pom.xml
new file mode 100644
index 0000000..fcba6f2
--- /dev/null
+++ b/app-packages/hbase-nopkg/pom.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+ <parent>
+ <groupId>org.apache.slider.packages</groupId>
+ <artifactId>slider-app-packages</artifactId>
+ <version>0.91.0-incubating-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>slider-hbase-resources</artifactId>
+ <packaging>pom</packaging>
+ <name>Slider HBase Resources</name>
+ <description>Slider HBase Resources</description>
+ <properties>
+ <work.dir>package-tmp</work.dir>
+ <app.package.name>${project.artifactId}-${pkg.version}</app.package.name>
+ </properties>
+
+ <profiles>
+ <profile>
+ <id>hbase-resources</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <version>${maven-antrun-plugin.version}</version>
+ <executions>
+ <execution>
+ <id>copy</id>
+ <phase>validate</phase>
+ <configuration>
+ <target name="copy and rename file">
+ <copy file="${pkg.src}/${pkg.name}" tofile="${project.build.directory}/${pkg.name}" />
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>${maven-assembly-plugin.version}</version>
+ <configuration>
+ <tarLongFileMode>gnu</tarLongFileMode>
+ <descriptor>src/assembly/hbase.xml</descriptor>
+ <appendAssemblyId>false</appendAssemblyId>
+ <finalName>${app.package.name}</finalName>
+ </configuration>
+ <executions>
+ <execution>
+ <id>build-tarball</id>
+ <phase>package</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
+
+ <build>
+ </build>
+
+ <dependencies>
+ </dependencies>
+
+</project>
diff --git a/app-packages/hbase-nopkg/resources-default.json b/app-packages/hbase-nopkg/resources-default.json
new file mode 100644
index 0000000..de3b279
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources-default.json
@@ -0,0 +1,47 @@
+{
+ "schema": "http://example.org/specification/v2.0.0",
+ "metadata": {
+ },
+ "global": {
+ "yarn.log.include.patterns": "",
+ "yarn.log.exclude.patterns": "",
+ "yarn.component.instances": "1",
+ "yarn.vcores": "1"
+ },
+ "components": {
+ "slider-appmaster": {
+ "yarn.memory": "1024"
+ },
+ "HBASE_MASTER": {
+ "yarn.role.priority": "1",
+ "yarn.component.instances": "1",
+ "yarn.placement.escalate.seconds": "10",
+ "yarn.memory": "1500"
+ },
+ "HBASE_REGIONSERVER": {
+ "yarn.role.priority": "2",
+ "yarn.component.instances": "1",
+ "yarn.memory": "1500",
+ "yarn.container.failure.threshold": "15",
+ "yarn.placement.escalate.seconds": "60"
+ },
+ "HBASE_REST": {
+ "yarn.role.priority": "3",
+ "yarn.component.instances": "1",
+ "yarn.component.placement.policy": "1",
+ "yarn.memory": "556"
+ },
+ "HBASE_THRIFT": {
+ "yarn.role.priority": "4",
+ "yarn.component.instances": "1",
+ "yarn.component.placement.policy": "1",
+ "yarn.memory": "556"
+ },
+ "HBASE_THRIFT2": {
+ "yarn.role.priority": "5",
+ "yarn.component.instances": "1",
+ "yarn.component.placement.policy": "1",
+ "yarn.memory": "556"
+ }
+ }
+}
diff --git a/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2 b/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
new file mode 100644
index 0000000..3ed7af2
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# See http://wiki.apache.org/hadoop/GangliaMetrics
+#
+# Make sure you know whether you are using ganglia 3.0 or 3.1.
+# If 3.1, you will have to patch your hadoop instance with HADOOP-4675
+# And, yes, this file is named hadoop-metrics.properties rather than
+# hbase-metrics.properties because we're leveraging the hadoop metrics
+# package and hadoop-metrics.properties is an hardcoded-name, at least
+# for the moment.
+#
+# See also http://hadoop.apache.org/hbase/docs/current/metrics.html
+
+# HBase-specific configuration to reset long-running stats (e.g. compactions)
+# If this variable is left out, then the default is no expiration.
+hbase.extendedperiod = 3600
+
+# Configuration of the "hbase" context for ganglia
+# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
+# hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
+hbase.period=10
+hbase.servers={{metric_collector_host}}:{{metric_collector_port}}
+
+# Configuration of the "jvm" context for ganglia
+# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
+# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
+jvm.period=10
+jvm.servers={{metric_collector_host}}:{{metric_collector_port}}
+
+# Configuration of the "rpc" context for ganglia
+# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
+# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
+rpc.period=10
+rpc.servers={{metric_collector_host}}:{{metric_collector_port}}
+
+#Ganglia following hadoop example
+hbase.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
+hbase.sink.ganglia.period=10
+
+# default for supportsparse is false
+*.sink.ganglia.supportsparse=true
+
+.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
+.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
+
+hbase.sink.ganglia.servers={{metric_collector_host}}:{{metric_collector_port}}
diff --git a/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2 b/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
new file mode 100644
index 0000000..3ed7af2
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# See http://wiki.apache.org/hadoop/GangliaMetrics
+#
+# Make sure you know whether you are using ganglia 3.0 or 3.1.
+# If 3.1, you will have to patch your hadoop instance with HADOOP-4675
+# And, yes, this file is named hadoop-metrics.properties rather than
+# hbase-metrics.properties because we're leveraging the hadoop metrics
+# package and hadoop-metrics.properties is an hardcoded-name, at least
+# for the moment.
+#
+# See also http://hadoop.apache.org/hbase/docs/current/metrics.html
+
+# HBase-specific configuration to reset long-running stats (e.g. compactions)
+# If this variable is left out, then the default is no expiration.
+hbase.extendedperiod = 3600
+
+# Configuration of the "hbase" context for ganglia
+# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
+# hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
+hbase.period=10
+hbase.servers={{metric_collector_host}}:{{metric_collector_port}}
+
+# Configuration of the "jvm" context for ganglia
+# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
+# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
+jvm.period=10
+jvm.servers={{metric_collector_host}}:{{metric_collector_port}}
+
+# Configuration of the "rpc" context for ganglia
+# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
+# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
+rpc.period=10
+rpc.servers={{metric_collector_host}}:{{metric_collector_port}}
+
+#Ganglia following hadoop example
+hbase.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
+hbase.sink.ganglia.period=10
+
+# default for supportsparse is false
+*.sink.ganglia.supportsparse=true
+
+.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
+.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
+
+hbase.sink.ganglia.servers={{metric_collector_host}}:{{metric_collector_port}}
diff --git a/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-MASTER.j2 b/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-MASTER.j2
new file mode 100644
index 0000000..5473ff5
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-MASTER.j2
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# See http://wiki.apache.org/hadoop/GangliaMetrics
+#
+# Make sure you know whether you are using ganglia 3.0 or 3.1.
+# If 3.1, you will have to patch your hadoop instance with HADOOP-4675
+# And, yes, this file is named hadoop-metrics.properties rather than
+# hbase-metrics.properties because we're leveraging the hadoop metrics
+# package and hadoop-metrics.properties is an hardcoded-name, at least
+# for the moment.
+#
+# See also http://hadoop.apache.org/hbase/docs/current/metrics.html
+
+# HBase-specific configuration to reset long-running stats (e.g. compactions)
+# If this variable is left out, then the default is no expiration.
+hbase.extendedperiod = 3600
+
+*.timeline.plugin.urls={{metric_collector_lib}}
+hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+hbase.period=10
+hbase.collector={{metric_collector_host}}:{{metric_collector_port}}
+
+jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+jvm.period=10
+jvm.collector={{metric_collector_host}}:{{metric_collector_port}}
+
+rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+rpc.period=10
+rpc.collector={{metric_collector_host}}:{{metric_collector_port}}
+
+hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+hbase.sink.timeline.period=10
+hbase.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
+hbase.sink.timeline.serviceName={{app_name}}
diff --git a/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-RS.j2 b/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-RS.j2
new file mode 100644
index 0000000..5473ff5
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hadoop-metrics2-hbase.properties-RS.j2
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# See http://wiki.apache.org/hadoop/GangliaMetrics
+#
+# Make sure you know whether you are using ganglia 3.0 or 3.1.
+# If 3.1, you will have to patch your hadoop instance with HADOOP-4675
+# And, yes, this file is named hadoop-metrics.properties rather than
+# hbase-metrics.properties because we're leveraging the hadoop metrics
+# package and hadoop-metrics.properties is an hardcoded-name, at least
+# for the moment.
+#
+# See also http://hadoop.apache.org/hbase/docs/current/metrics.html
+
+# HBase-specific configuration to reset long-running stats (e.g. compactions)
+# If this variable is left out, then the default is no expiration.
+hbase.extendedperiod = 3600
+
+*.timeline.plugin.urls={{metric_collector_lib}}
+hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+hbase.period=10
+hbase.collector={{metric_collector_host}}:{{metric_collector_port}}
+
+jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+jvm.period=10
+jvm.collector={{metric_collector_host}}:{{metric_collector_port}}
+
+rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+rpc.period=10
+rpc.collector={{metric_collector_host}}:{{metric_collector_port}}
+
+hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+hbase.sink.timeline.period=10
+hbase.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
+hbase.sink.timeline.serviceName={{app_name}}
diff --git a/app-packages/hbase-nopkg/resources/hbase-env-client.sh.j2 b/app-packages/hbase-nopkg/resources/hbase-env-client.sh.j2
new file mode 100644
index 0000000..2511a29
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hbase-env-client.sh.j2
@@ -0,0 +1,49 @@
+# Set environment variables here.
+
+# The java implementation to use. Java 1.6 required.
+export JAVA_HOME={{java64_home}}
+
+# Extra Java CLASSPATH elements. Optional.
+export HBASE_CLASSPATH=${HBASE_CLASSPATH}
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+# export HBASE_HEAPSIZE=1000
+
+# Extra Java runtime options.
+# Below are what we set by default. May only work with SUN JVM.
+# For more on why as well as other possible settings,
+# see http://wiki.apache.org/hadoop/PerformanceTuning
+export HBASE_OPTS="-XX:+UseConcMarkSweepGC"
+# Uncomment below to enable java garbage collection logging.
+# export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log"
+
+# Uncomment and adjust to enable JMX exporting
+# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.
+# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html
+#
+# export HBASE_JMX_BASE="-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false"
+export HBASE_MASTER_OPTS="-Xmx{{master_heapsize}}"
+export HBASE_REGIONSERVER_OPTS="-Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}"
+# export HBASE_THRIFT_OPTS="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103"
+# export HBASE_ZOOKEEPER_OPTS="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104"
+
+# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.
+export HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers
+
+# Extra ssh options. Empty by default.
+# export HBASE_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR"
+
+# A string representing this instance of hbase. $USER by default.
+# export HBASE_IDENT_STRING=$USER
+
+# The scheduling priority for daemon processes. See 'man nice'.
+# export HBASE_NICENESS=10
+
+# Seconds to sleep between slave commands. Unset by default. This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HBASE_SLAVE_SLEEP=0.1
+
+# Tell HBase whether it should manage it's own instance of Zookeeper or not.
+export HBASE_MANAGES_ZK=false
+export HBASE_OPTS="$HBASE_OPTS {{hbase_opts}}"
diff --git a/app-packages/hbase-nopkg/resources/hbase-env-secured.sh.j2 b/app-packages/hbase-nopkg/resources/hbase-env-secured.sh.j2
new file mode 100644
index 0000000..9ee901d
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hbase-env-secured.sh.j2
@@ -0,0 +1,63 @@
+# Set environment variables here.
+
+# The java implementation to use. Java 1.6 required.
+export JAVA_HOME={{java64_home}}
+
+# HBase Configuration directory
+export HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{conf_dir}}}
+
+# Extra Java CLASSPATH elements. Optional.
+export HBASE_CLASSPATH=${HBASE_CLASSPATH}
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+# export HBASE_HEAPSIZE=1000
+
+# Extra Java runtime options.
+# Below are what we set by default. May only work with SUN JVM.
+# For more on why as well as other possible settings,
+# see http://wiki.apache.org/hadoop/PerformanceTuning
+export HBASE_OPTS="-XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log"
+export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:{{log_dir}}/gc.log-`date +'%Y%m%d%H%M'`"
+# Uncomment below to enable java garbage collection logging.
+# export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log"
+
+# Uncomment and adjust to enable JMX exporting
+# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.
+# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html
+#
+# export HBASE_JMX_BASE="-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false"
+export HBASE_MASTER_OPTS="-Xmx{{master_heapsize}}"
+export HBASE_REGIONSERVER_OPTS="-Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}"
+# export HBASE_THRIFT_OPTS="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103"
+# export HBASE_ZOOKEEPER_OPTS="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104"
+
+# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.
+export HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers
+
+# Extra ssh options. Empty by default.
+# export HBASE_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR"
+
+# Where log files are stored. $HBASE_HOME/logs by default.
+export HBASE_LOG_DIR={{log_dir}}
+
+# A string representing this instance of hbase. $USER by default.
+# export HBASE_IDENT_STRING=$USER
+
+# The scheduling priority for daemon processes. See 'man nice'.
+# export HBASE_NICENESS=10
+
+# The directory where pid files are stored. /tmp by default.
+export HBASE_PID_DIR={{pid_dir}}
+
+# Seconds to sleep between slave commands. Unset by default. This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HBASE_SLAVE_SLEEP=0.1
+
+# Tell HBase whether it should manage it's own instance of Zookeeper or not.
+export HBASE_MANAGES_ZK=false
+export HBASE_OPTS="$HBASE_OPTS {{hbase_opts}}"
+
+export HBASE_OPTS="$HBASE_OPTS -Djava.security.auth.login.config={{client_jaas_config_file}}"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}}"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}}"
diff --git a/app-packages/hbase-nopkg/resources/hbase-env.sh.j2 b/app-packages/hbase-nopkg/resources/hbase-env.sh.j2
new file mode 100644
index 0000000..3ed354f
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hbase-env.sh.j2
@@ -0,0 +1,59 @@
+# Set environment variables here.
+
+# The java implementation to use. Java 1.6 required.
+export JAVA_HOME={{java64_home}}
+
+# HBase Configuration directory
+export HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{conf_dir}}}
+
+# Extra Java CLASSPATH elements. Optional.
+export HBASE_CLASSPATH=${HBASE_CLASSPATH}
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+# export HBASE_HEAPSIZE=1000
+
+# Extra Java runtime options.
+# Below are what we set by default. May only work with SUN JVM.
+# For more on why as well as other possible settings,
+# see http://wiki.apache.org/hadoop/PerformanceTuning
+export HBASE_OPTS="-XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log"
+export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:{{log_dir}}/gc.log-`date +'%Y%m%d%H%M'`"
+# Uncomment below to enable java garbage collection logging.
+# export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log"
+
+# Uncomment and adjust to enable JMX exporting
+# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.
+# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html
+#
+# export HBASE_JMX_BASE="-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false"
+export HBASE_MASTER_OPTS="-Xmx{{master_heapsize}}"
+export HBASE_REGIONSERVER_OPTS="-Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}"
+# export HBASE_THRIFT_OPTS="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103"
+# export HBASE_ZOOKEEPER_OPTS="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104"
+
+# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.
+export HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers
+
+# Extra ssh options. Empty by default.
+# export HBASE_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR"
+
+# Where log files are stored. $HBASE_HOME/logs by default.
+export HBASE_LOG_DIR={{log_dir}}
+
+# A string representing this instance of hbase. $USER by default.
+# export HBASE_IDENT_STRING=$USER
+
+# The scheduling priority for daemon processes. See 'man nice'.
+# export HBASE_NICENESS=10
+
+# The directory where pid files are stored. /tmp by default.
+export HBASE_PID_DIR={{pid_dir}}
+
+# Seconds to sleep between slave commands. Unset by default. This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HBASE_SLAVE_SLEEP=0.1
+
+# Tell HBase whether it should manage it's own instance of Zookeeper or not.
+export HBASE_MANAGES_ZK=false
+export HBASE_OPTS="$HBASE_OPTS {{hbase_opts}}"
diff --git a/app-packages/hbase-nopkg/resources/hbase-log4j.properties.j2 b/app-packages/hbase-nopkg/resources/hbase-log4j.properties.j2
new file mode 100644
index 0000000..e0a7e22
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hbase-log4j.properties.j2
@@ -0,0 +1,110 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Define some default values that can be overridden by system properties
+hbase.root.logger=INFO,console
+hbase.security.logger=INFO,console
+hbase.log.dir=.
+hbase.log.file=hbase.log
+
+# Define the root logger to the system property "hbase.root.logger".
+log4j.rootLogger=${hbase.root.logger}
+
+# Logging Threshold
+log4j.threshold=ALL
+
+#
+# Daily Rolling File Appender
+#
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
+
+# Rollver at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
+
+# Rolling File Appender properties
+hbase.log.maxfilesize=256MB
+hbase.log.maxbackupindex=20
+
+# Rolling File Appender
+log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
+
+log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
+log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
+
+log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
+
+#
+# Security audit appender
+#
+hbase.security.log.file=SecurityAuth.audit
+hbase.security.log.maxfilesize=256MB
+hbase.security.log.maxbackupindex=20
+log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
+log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
+log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
+log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
+log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
+log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.category.SecurityLogger=${hbase.security.logger}
+log4j.additivity.SecurityLogger=false
+#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
+
+#
+# Null Appender
+#
+log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
+
+# Custom Logging levels
+
+log4j.logger.org.apache.zookeeper=INFO
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+log4j.logger.org.apache.hadoop.hbase=DEBUG
+# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
+log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
+log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
+#log4j.logger.org.apache.hadoop.dfs=DEBUG
+# Set this class to log INFO only otherwise its OTT
+# Enable this to get detailed connection error/retry logging.
+# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
+
+
+# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
+#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
+
+# Uncomment the below if you want to remove logging of client region caching'
+# and scan of .META. messages
+# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
+# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
diff --git a/app-packages/hbase-nopkg/resources/hbase_client_jaas.conf.j2 b/app-packages/hbase-nopkg/resources/hbase_client_jaas.conf.j2
new file mode 100644
index 0000000..bb4279c
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hbase_client_jaas.conf.j2
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Client {
+com.sun.security.auth.module.Krb5LoginModule required
+useKeyTab=false
+useTicketCache=true;
+};
diff --git a/app-packages/hbase-nopkg/resources/hbase_master_jaas.conf.j2 b/app-packages/hbase-nopkg/resources/hbase_master_jaas.conf.j2
new file mode 100644
index 0000000..91ce3ef
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hbase_master_jaas.conf.j2
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Client {
+com.sun.security.auth.module.Krb5LoginModule required
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{master_keytab_path}}"
+principal="{{master_jaas_princ}}";
+};
diff --git a/app-packages/hbase-nopkg/resources/hbase_regionserver_jaas.conf.j2 b/app-packages/hbase-nopkg/resources/hbase_regionserver_jaas.conf.j2
new file mode 100644
index 0000000..2a9b9f3
--- /dev/null
+++ b/app-packages/hbase-nopkg/resources/hbase_regionserver_jaas.conf.j2
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Client {
+com.sun.security.auth.module.Krb5LoginModule required
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{regionserver_keytab_path}}"
+principal="{{regionserver_jaas_princ}}";
+};
diff --git a/app-packages/hbase-nopkg/src/assembly/hbase.xml b/app-packages/hbase-nopkg/src/assembly/hbase.xml
new file mode 100644
index 0000000..0558319
--- /dev/null
+++ b/app-packages/hbase-nopkg/src/assembly/hbase.xml
@@ -0,0 +1,89 @@
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+
+<assembly
+ xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+ <id>hbase_v${hbase.version}</id>
+ <formats>
+ <format>zip</format>
+ <format>dir</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+
+ <files>
+ <file>
+ <source>appConfig-default.json</source>
+ <outputDirectory>/</outputDirectory>
+ <filtered>true</filtered>
+ <fileMode>0755</fileMode>
+ </file>
+ <file>
+ <source>appConfig-secured-default.json</source>
+ <outputDirectory>/</outputDirectory>
+ <filtered>true</filtered>
+ <fileMode>0755</fileMode>
+ </file>
+ <file>
+ <source>clientInstallConfig-default.json</source>
+ <outputDirectory>/</outputDirectory>
+ <filtered>true</filtered>
+ <fileMode>0755</fileMode>
+ </file>
+ <file>
+ <source>metainfo.xml</source>
+ <outputDirectory>/</outputDirectory>
+ <filtered>true</filtered>
+ <fileMode>0755</fileMode>
+ </file>
+ <file>
+ <source>metainfo-secured.xml</source>
+ <outputDirectory>/</outputDirectory>
+ <filtered>true</filtered>
+ <fileMode>0755</fileMode>
+ </file>
+ <file>
+ <source>${pkg.src}/${pkg.name}</source>
+ <outputDirectory>resources</outputDirectory>
+ <filtered>false</filtered>
+ <fileMode>0755</fileMode>
+ </file>
+ </files>
+
+ <fileSets>
+ <fileSet>
+ <directory>${project.basedir}</directory>
+ <outputDirectory>/</outputDirectory>
+ <excludes>
+ <exclude>pom.xml</exclude>
+ <exclude>src/**</exclude>
+ <exclude>target/**</exclude>
+ <exclude>appConfig-default.json</exclude>
+ <exclude>appConfig-secured-default.json</exclude>
+ <exclude>clientInstallConfig-default.json</exclude>
+ <exclude>metainfo.xml</exclude>
+ <exclude>metainfo-secured.xml</exclude>
+ </excludes>
+ <fileMode>0755</fileMode>
+ <directoryMode>0755</directoryMode>
+ </fileSet>
+
+ </fileSets>
+</assembly>
diff --git a/pom.xml b/pom.xml
index 52c7459..7c21e07 100644
--- a/pom.xml
+++ b/pom.xml
@@ -173,6 +173,7 @@
<protobuf.version>2.5.0</protobuf.version>
<slf4j.version>1.7.5</slf4j.version>
+ <snakeyaml.version>1.16</snakeyaml.version>
<storm.version>0.9.3</storm.version>
<stringtemplate.version>2.4.1</stringtemplate.version>
<zookeeper.version>3.4.6</zookeeper.version>
@@ -1178,6 +1179,12 @@
<version>${slf4j.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.yaml</groupId>
+ <artifactId>snakeyaml</artifactId>
+ <version>${snakeyaml.version}</version>
+ </dependency>
+
<!-- Used for testing -->
<dependency>
<groupId>junit</groupId>
diff --git a/slider-agent/src/main/python/agent/ActionQueue.py b/slider-agent/src/main/python/agent/ActionQueue.py
index 7514337..e973337 100644
--- a/slider-agent/src/main/python/agent/ActionQueue.py
+++ b/slider-agent/src/main/python/agent/ActionQueue.py
@@ -161,7 +161,7 @@
self.commandStatuses.put_command_status(command, in_progress_status, reportResult)
store_config = False
- if ActionQueue.STORE_APPLIED_CONFIG in command['commandParams']:
+ if 'commandParams' in command and ActionQueue.STORE_APPLIED_CONFIG in command['commandParams']:
store_config = 'true' == command['commandParams'][ActionQueue.STORE_APPLIED_CONFIG]
store_command = False
if 'roleParams' in command and command['roleParams'] is not None and ActionQueue.AUTO_RESTART in command['roleParams']:
diff --git a/slider-agent/src/main/python/scripts/shell_cmd/basic_installer.py b/slider-agent/src/main/python/scripts/shell_cmd/basic_installer.py
index 561fd6c..df9b6f0 100644
--- a/slider-agent/src/main/python/scripts/shell_cmd/basic_installer.py
+++ b/slider-agent/src/main/python/scripts/shell_cmd/basic_installer.py
@@ -35,16 +35,15 @@
self.configure(env)
process_cmd = format("{cmd}")
- Execute(process_cmd,
- logoutput=False,
- wait_for_finish=False,
- pid_file=params.pid_file,
- poll_after = 5
- )
+ Execute(process_cmd)
def stop(self, env):
import params
env.set_params(params)
+ self.configure(env)
+ process_cmd = format("{cmd}")
+
+ Execute(process_cmd)
def status(self, env):
import params
diff --git a/slider-core/pom.xml b/slider-core/pom.xml
index eda352f..57507d6 100644
--- a/slider-core/pom.xml
+++ b/slider-core/pom.xml
@@ -254,6 +254,12 @@
</dependency>
<dependency>
+ <groupId>org.yaml</groupId>
+ <artifactId>snakeyaml</artifactId>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<scope>compile</scope>
diff --git a/slider-core/src/main/java/org/apache/slider/client/ClientUtils.java b/slider-core/src/main/java/org/apache/slider/client/ClientUtils.java
new file mode 100644
index 0000000..c3ccb1d
--- /dev/null
+++ b/slider-core/src/main/java/org/apache/slider/client/ClientUtils.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.slider.client;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.PathNotFoundException;
+import org.apache.hadoop.registry.client.api.RegistryOperations;
+import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
+import org.apache.hadoop.registry.client.exceptions.NoRecordException;
+import org.apache.hadoop.registry.client.types.ServiceRecord;
+import org.apache.slider.common.SliderKeys;
+import org.apache.slider.core.exceptions.BadCommandArgumentsException;
+import org.apache.slider.core.exceptions.NotFoundException;
+import org.apache.slider.core.exceptions.SliderException;
+import org.apache.slider.core.registry.docstore.ConfigFormat;
+import org.apache.slider.core.registry.docstore.PublishedConfigSet;
+import org.apache.slider.core.registry.docstore.PublishedConfiguration;
+import org.apache.slider.core.registry.docstore.PublishedConfigurationOutputter;
+import org.apache.slider.core.registry.retrieve.RegistryRetriever;
+
+import java.io.File;
+import java.io.IOException;
+
+import static org.apache.hadoop.registry.client.binding.RegistryUtils.currentUser;
+import static org.apache.hadoop.registry.client.binding.RegistryUtils.servicePath;
+
+public class ClientUtils {
+ public static ServiceRecord lookupServiceRecord(RegistryOperations rops,
+ String user, String name) throws IOException, SliderException {
+ return lookupServiceRecord(rops, user, null, name);
+ }
+
+ public static ServiceRecord lookupServiceRecord(RegistryOperations rops,
+ String user, String type, String name) throws IOException,
+ SliderException {
+ if (StringUtils.isEmpty(user)) {
+ user = currentUser();
+ } else {
+ user = RegistryPathUtils.encodeForRegistry(user);
+ }
+ if (StringUtils.isEmpty(type)) {
+ type = SliderKeys.APP_TYPE;
+ }
+
+ String path = servicePath(user, type, name);
+ return resolve(rops, path);
+ }
+
+ public static ServiceRecord resolve(RegistryOperations rops, String path)
+ throws IOException, SliderException {
+ try {
+ return rops.resolve(path);
+ } catch (PathNotFoundException | NoRecordException e) {
+ throw new NotFoundException(e.getPath().toString(), e);
+ }
+ }
+
+ public static PublishedConfiguration getConfigFromRegistry(
+ RegistryOperations rops, Configuration configuration,
+ String configName, String appName, String user, boolean external)
+ throws IOException, SliderException {
+ ServiceRecord instance = lookupServiceRecord(rops, user, appName);
+
+ RegistryRetriever retriever = new RegistryRetriever(configuration, instance);
+ PublishedConfigSet configurations = retriever.getConfigurations(external);
+
+ PublishedConfiguration published = retriever.retrieveConfiguration(
+ configurations, configName, external);
+ return published;
+ }
+
+ public static String saveOrReturnConfig(PublishedConfiguration published,
+ String format, File destPath, String fileName)
+ throws BadCommandArgumentsException, IOException {
+ ConfigFormat configFormat = ConfigFormat.resolve(format);
+ if (configFormat == null) {
+ throw new BadCommandArgumentsException(
+ "Unknown/Unsupported format %s ", format);
+ }
+ PublishedConfigurationOutputter outputter =
+ PublishedConfigurationOutputter.createOutputter(configFormat,
+ published);
+ boolean print = destPath == null;
+ if (!print) {
+ if (destPath.isDirectory()) {
+ // creating it under a directory
+ destPath = new File(destPath, fileName);
+ }
+ outputter.save(destPath);
+ return null;
+ } else {
+ return outputter.asString();
+ }
+ }
+}
diff --git a/slider-core/src/main/java/org/apache/slider/client/SliderClient.java b/slider-core/src/main/java/org/apache/slider/client/SliderClient.java
index dd90e46..eb0630d 100644
--- a/slider-core/src/main/java/org/apache/slider/client/SliderClient.java
+++ b/slider-core/src/main/java/org/apache/slider/client/SliderClient.java
@@ -101,6 +101,7 @@
import org.apache.slider.common.params.ActionPackageArgs;
import org.apache.slider.common.params.ActionRegistryArgs;
import org.apache.slider.common.params.ActionResolveArgs;
+import org.apache.slider.common.params.ActionResourceArgs;
import org.apache.slider.common.params.ActionStatusArgs;
import org.apache.slider.common.params.ActionThawArgs;
import org.apache.slider.common.params.ActionTokensArgs;
@@ -177,7 +178,6 @@
import java.io.BufferedReader;
import java.io.File;
-import java.io.FilenameFilter;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
@@ -415,7 +415,7 @@
case ACTION_INSTALL_PACKAGE:
exitCode = actionInstallPkg(serviceArgs.getActionInstallPackageArgs());
break;
-
+
case ACTION_KEYTAB:
exitCode = actionKeytab(serviceArgs.getActionKeytabArgs());
break;
@@ -443,7 +443,11 @@
case ACTION_RESOLVE:
exitCode = actionResolve(serviceArgs.getActionResolveArgs());
break;
-
+
+ case ACTION_RESOURCE:
+ exitCode = actionResource(serviceArgs.getActionResourceArgs());
+ break;
+
case ACTION_STATUS:
exitCode = actionStatus(clusterName, serviceArgs.getActionStatusArgs());
break;
@@ -1029,7 +1033,8 @@
Path fileInFs = new Path(pkgPath, keytabInfo.keytab );
log.info("Deleting keytab {}", fileInFs);
FileSystem sfs = sliderFileSystem.getFileSystem();
- require(sfs.exists(fileInFs), "No keytab to delete found at %s", fileInFs.toUri());
+ require(sfs.exists(fileInFs), "No keytab to delete found at %s",
+ fileInFs.toUri());
sfs.delete(fileInFs, false);
return EXIT_SUCCESS;
@@ -1105,6 +1110,103 @@
}
@Override
+ public int actionResource(ActionResourceArgs resourceInfo)
+ throws YarnException, IOException {
+ if (resourceInfo.install) {
+ return actionInstallResource(resourceInfo);
+ } else if (resourceInfo.delete) {
+ return actionDeleteResource(resourceInfo);
+ } else if (resourceInfo.list) {
+ return actionListResource(resourceInfo);
+ } else {
+ throw new BadCommandArgumentsException(
+ "Resource option specified not found.\n"
+ + CommonArgs.usage(serviceArgs, ACTION_RESOURCE));
+ }
+ }
+
+ private int actionListResource(ActionResourceArgs resourceInfo) throws IOException {
+ String folder = resourceInfo.folder != null ? resourceInfo.folder : StringUtils.EMPTY;
+ Path path = sliderFileSystem.buildResourcePath(folder);
+ RemoteIterator<LocatedFileStatus> files =
+ sliderFileSystem.getFileSystem().listFiles(path, true);
+ log.info("Resources:");
+ while (files.hasNext()) {
+ log.info("\t" + files.next().getPath().toString());
+ }
+
+ return EXIT_SUCCESS;
+ }
+
+ private int actionDeleteResource(ActionResourceArgs resourceInfo)
+ throws BadCommandArgumentsException, IOException {
+ if (StringUtils.isEmpty(resourceInfo.resource)) {
+ throw new BadCommandArgumentsException("A file name is required.");
+ }
+
+ Path fileInFs;
+ if (resourceInfo.folder == null) {
+ fileInFs = sliderFileSystem.buildResourcePath(resourceInfo.resource);
+ } else {
+ fileInFs = sliderFileSystem.buildResourcePath(resourceInfo.folder,
+ resourceInfo.resource);
+ }
+
+ log.info("Deleting resource {}", fileInFs);
+ FileSystem sfs = sliderFileSystem.getFileSystem();
+ require(sfs.exists(fileInFs), "No resource to delete found at %s", fileInFs.toUri());
+ sfs.delete(fileInFs, true);
+
+ return EXIT_SUCCESS;
+ }
+
+ private int actionInstallResource(ActionResourceArgs resourceInfo)
+ throws BadCommandArgumentsException, IOException {
+ Path srcFile = null;
+ String folder = resourceInfo.folder != null ? resourceInfo.folder : StringUtils.EMPTY;
+
+ requireArgumentSet(Arguments.ARG_RESOURCE, resourceInfo.resource);
+ File file = new File(resourceInfo.resource);
+ require(file.isFile() || file.isDirectory(),
+ "Unable to access supplied file at %s", file.getAbsolutePath());
+
+ File[] files;
+ if (file.isDirectory()) {
+ files = file.listFiles();
+ } else {
+ files = new File[] { file };
+ }
+
+ Path pkgPath = sliderFileSystem.buildResourcePath(folder);
+ FileSystem sfs = sliderFileSystem.getFileSystem();
+
+ if (!sfs.exists(pkgPath)) {
+ sfs.mkdirs(pkgPath);
+ sfs.setPermission(pkgPath, new FsPermission(
+ FsAction.ALL, FsAction.NONE, FsAction.NONE));
+ } else {
+ require(sfs.isDirectory(pkgPath), "Specified folder %s exists and is " +
+ "not a directory", folder);
+ }
+
+ for (File f : files) {
+ srcFile = new Path(f.toURI());
+
+ Path fileInFs = new Path(pkgPath, srcFile.getName());
+ log.info("Installing file {} at {} and overwrite is {}.",
+ srcFile, fileInFs, resourceInfo.overwrite);
+ require(!(sfs.exists(fileInFs) && !resourceInfo.overwrite),
+ "File exists at %s. Use --overwrite to overwrite.", fileInFs.toUri());
+
+ sfs.copyFromLocalFile(false, resourceInfo.overwrite, srcFile, fileInFs);
+ sfs.setPermission(fileInFs,
+ new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE));
+ }
+
+ return EXIT_SUCCESS;
+ }
+
+ @Override
public int actionClient(ActionClientArgs clientInfo) throws
YarnException,
IOException {
@@ -1199,8 +1301,21 @@
E_INVALID_INSTALL_PATH + ": " + clientInfo.installLocation.getAbsolutePath());
File pkgFile;
- require(isSet(clientInfo.packageURI), E_INVALID_APPLICATION_PACKAGE_LOCATION);
- pkgFile = new File(clientInfo.packageURI);
+ File tmpDir = null;
+
+ require(isSet(clientInfo.packageURI) || isSet(clientInfo.name),
+ E_INVALID_APPLICATION_PACKAGE_LOCATION);
+ if (isSet(clientInfo.packageURI)) {
+ pkgFile = new File(clientInfo.packageURI);
+ } else {
+ Path appDirPath = sliderFileSystem.buildAppDefDirPath(clientInfo.name);
+ Path appDefPath = new Path(appDirPath, SliderKeys.DEFAULT_APP_PKG);
+ require(sliderFileSystem.isFile(appDefPath),
+ E_INVALID_APPLICATION_PACKAGE_LOCATION);
+ tmpDir = Files.createTempDir();
+ pkgFile = new File(tmpDir, SliderKeys.DEFAULT_APP_PKG);
+ sliderFileSystem.copyHdfsFileToLocal(appDefPath, pkgFile);
+ }
require(pkgFile.isFile(),
E_UNABLE_TO_READ_SUPPLIED_PACKAGE_FILE + " at %s", pkgFile.getAbsolutePath());
@@ -1222,6 +1337,8 @@
AbstractClientProvider
provider = createClientProvider(SliderProviderFactory.DEFAULT_CLUSTER_TYPE);
provider.processClientOperation(sliderFileSystem,
+ getRegistryOperations(),
+ getConfig(),
"INSTALL",
clientInfo.installLocation,
pkgFile,
@@ -4062,17 +4179,9 @@
@VisibleForTesting
public PublishedConfiguration actionRegistryGetConfig(ActionRegistryArgs registryArgs)
throws YarnException, IOException {
- ServiceRecord instance = lookupServiceRecord(registryArgs);
-
- RegistryRetriever retriever = new RegistryRetriever(getConfig(), instance);
- boolean external = !registryArgs.internal;
- PublishedConfigSet configurations =
- retriever.getConfigurations(external);
-
- PublishedConfiguration published = retriever.retrieveConfiguration(configurations,
- registryArgs.getConf,
- external);
- return published;
+ return ClientUtils.getConfigFromRegistry(getRegistryOperations(),
+ getConfig(), registryArgs.getConf, registryArgs.name, registryArgs.user,
+ !registryArgs.internal);
}
/**
@@ -4115,27 +4224,11 @@
// decide whether or not to print
String entry = registryArgs.getConf;
String format = registryArgs.format;
- ConfigFormat configFormat = ConfigFormat.resolve(format);
- if (configFormat == null) {
- throw new BadCommandArgumentsException(
- "Unknown/Unsupported format %s ", format);
+ String output = ClientUtils.saveOrReturnConfig(published,
+ registryArgs.format, registryArgs.out, entry + "." + format);
+ if (output != null) {
+ print(output);
}
- PublishedConfigurationOutputter outputter =
- PublishedConfigurationOutputter.createOutputter(configFormat,
- published);
- boolean print = registryArgs.out == null;
- if (!print) {
- File outputPath = registryArgs.out;
- if (outputPath.isDirectory()) {
- // creating it under a directory
- outputPath = new File(outputPath, entry + "." + format);
- }
- log.debug("Destination path: {}", outputPath);
- outputter.save(outputPath);
- } else {
- print(outputter.asString());
- }
-
}
/**
@@ -4185,32 +4278,8 @@
private ServiceRecord lookupServiceRecord(ActionRegistryArgs registryArgs) throws
SliderException,
IOException {
- String user;
- if (StringUtils.isNotEmpty(registryArgs.user)) {
- user = RegistryPathUtils.encodeForRegistry(registryArgs.user);
- } else {
- user = currentUser();
- }
-
- String path = servicePath(user, registryArgs.serviceType,
- registryArgs.name);
- return resolve(path);
- }
-
- /**
- * Look up a service record of the current user
- * @param serviceType service type
- * @param id instance ID
- * @return instance data
- * @throws UnknownApplicationInstanceException no path or service record
- * at the end of the path
- * @throws SliderException other failures
- * @throws IOException IO problems or wrapped exceptions
- */
- public ServiceRecord lookupServiceRecord(String serviceType, String id)
- throws IOException, SliderException {
- String path = servicePath(currentUser(), serviceType, id);
- return resolve(path);
+ return ClientUtils.lookupServiceRecord(getRegistryOperations(),
+ registryArgs.user, registryArgs.serviceType, registryArgs.name);
}
/**
@@ -4225,11 +4294,7 @@
*/
public ServiceRecord resolve(String path)
throws IOException, SliderException {
- try {
- return getRegistryOperations().resolve(path);
- } catch (PathNotFoundException | NoRecordException e) {
- throw new NotFoundException(e.getPath().toString(), e);
- }
+ return ClientUtils.resolve(getRegistryOperations(), path);
}
/**
diff --git a/slider-core/src/main/java/org/apache/slider/client/SliderClientAPI.java b/slider-core/src/main/java/org/apache/slider/client/SliderClientAPI.java
index 5c5d96b..30f6ba9 100644
--- a/slider-core/src/main/java/org/apache/slider/client/SliderClientAPI.java
+++ b/slider-core/src/main/java/org/apache/slider/client/SliderClientAPI.java
@@ -43,6 +43,7 @@
import org.apache.slider.common.params.ActionListArgs;
import org.apache.slider.common.params.ActionRegistryArgs;
import org.apache.slider.common.params.ActionResolveArgs;
+import org.apache.slider.common.params.ActionResourceArgs;
import org.apache.slider.common.params.ActionStatusArgs;
import org.apache.slider.common.params.ActionThawArgs;
import org.apache.slider.common.params.ActionUpgradeArgs;
@@ -117,7 +118,6 @@
* @throws YarnException Yarn problems
* @throws IOException other problems
* @throws BadCommandArgumentsException bad arguments.
- * @deprecated use #actionKeytab
*/
int actionKeytab(ActionKeytabArgs keytabInfo)
throws YarnException, IOException;
@@ -134,6 +134,17 @@
throws YarnException, IOException;
/**
+ * Manage file resources leveraged by slider
+ *
+ * @param resourceInfo the arguments needed to manage the resource
+ * @throws YarnException Yarn problems
+ * @throws IOException other problems
+ * @throws BadCommandArgumentsException bad arguments.
+ */
+ int actionResource(ActionResourceArgs resourceInfo)
+ throws YarnException, IOException;
+
+ /**
* Perform client operations such as install or configure
*
* @param clientInfo the arguments needed for client operations
diff --git a/slider-core/src/main/java/org/apache/slider/common/SliderKeys.java b/slider-core/src/main/java/org/apache/slider/common/SliderKeys.java
index 05c7048..ba3effc 100644
--- a/slider-core/src/main/java/org/apache/slider/common/SliderKeys.java
+++ b/slider-core/src/main/java/org/apache/slider/common/SliderKeys.java
@@ -116,7 +116,8 @@
String HISTORY_FILENAME_SUFFIX = "json";
String HISTORY_FILENAME_PREFIX = "rolehistory-";
String KEYTAB_DIR = "keytabs";
-
+ String RESOURCE_DIR = "resources";
+
/**
* Filename pattern is required to save in strict temporal order.
* Important: older files must sort less-than newer files when using
diff --git a/slider-core/src/main/java/org/apache/slider/common/params/ActionResourceArgs.java b/slider-core/src/main/java/org/apache/slider/common/params/ActionResourceArgs.java
new file mode 100644
index 0000000..60fcc87
--- /dev/null
+++ b/slider-core/src/main/java/org/apache/slider/common/params/ActionResourceArgs.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.slider.common.params;
+
+import com.beust.jcommander.Parameter;
+import com.beust.jcommander.Parameters;
+
+@Parameters(commandNames = {SliderActions.ACTION_RESOURCE},
+ commandDescription = SliderActions.DESCRIBE_ACTION_RESOURCE)
+
+public class ActionResourceArgs extends AbstractActionArgs {
+
+ @Override
+ public String getActionName() {
+ return SliderActions.ACTION_RESOURCE;
+ }
+
+ @Parameter(names = {ARG_INSTALL},
+ description = "Install the resource(s)")
+ public boolean install;
+
+ @Parameter(names = {ARG_DELETE},
+ description = "Delete the file")
+ public boolean delete;
+
+ @Parameter(names = {ARG_LIST},
+ description = "List of installed files")
+ public boolean list;
+
+ @Parameter(names = {ARG_RESOURCE},
+ description = "Name of the file or directory")
+ public String resource;
+
+ @Parameter(names = {ARG_DESTDIR},
+ description = "The name of the folder in which to store the resources")
+ public String folder;
+
+ @Parameter(names = {ARG_OVERWRITE}, description = "Overwrite existing resource(s)")
+ public boolean overwrite = false;
+
+ /**
+ * Get the min #of params expected
+ * @return the min number of params in the {@link #parameters} field
+ */
+ public int getMinParams() {
+ return 0;
+ }
+
+ @Override
+ public int getMaxParams() {
+ return 3;
+ }
+}
diff --git a/slider-core/src/main/java/org/apache/slider/common/params/Arguments.java b/slider-core/src/main/java/org/apache/slider/common/params/Arguments.java
index 0a8388d..aec4e26 100644
--- a/slider-core/src/main/java/org/apache/slider/common/params/Arguments.java
+++ b/slider-core/src/main/java/org/apache/slider/common/params/Arguments.java
@@ -104,6 +104,7 @@
String ARG_PROVIDER = "--provider";
String ARG_QUEUE = "--queue";
String ARG_REPLACE_PKG = "--replacepkg";
+ String ARG_RESOURCE = "--resource";
String ARG_RESOURCES = "--resources";
String ARG_RES_COMP_OPT = "--rescompopt";
String ARG_RES_COMP_OPT_SHORT = "--rco";
diff --git a/slider-core/src/main/java/org/apache/slider/common/params/ClientArgs.java b/slider-core/src/main/java/org/apache/slider/common/params/ClientArgs.java
index 0a658ea..4016cc9 100644
--- a/slider-core/src/main/java/org/apache/slider/common/params/ClientArgs.java
+++ b/slider-core/src/main/java/org/apache/slider/common/params/ClientArgs.java
@@ -75,6 +75,7 @@
private final ActionPackageArgs actionPackageArgs = new ActionPackageArgs();
private final ActionRegistryArgs actionRegistryArgs = new ActionRegistryArgs();
private final ActionResolveArgs actionResolveArgs = new ActionResolveArgs();
+ private final ActionResourceArgs actionResourceArgs = new ActionResourceArgs();
private final ActionStatusArgs actionStatusArgs = new ActionStatusArgs();
private final ActionThawArgs actionThawArgs = new ActionThawArgs();
private final ActionTokensArgs actionTokenArgs = new ActionTokensArgs();
@@ -116,6 +117,7 @@
actionPackageArgs,
actionRegistryArgs,
actionResolveArgs,
+ actionResourceArgs,
actionStatusArgs,
actionThawArgs,
actionTokenArgs,
@@ -227,6 +229,10 @@
return actionResolveArgs;
}
+ public ActionResourceArgs getActionResourceArgs() {
+ return actionResourceArgs;
+ }
+
public ActionStatusArgs getActionStatusArgs() {
return actionStatusArgs;
}
@@ -346,6 +352,10 @@
bindCoreAction(actionResolveArgs);
break;
+ case ACTION_RESOURCE:
+ bindCoreAction(actionResourceArgs);
+ break;
+
case ACTION_STATUS:
bindCoreAction(actionStatusArgs);
break;
diff --git a/slider-core/src/main/java/org/apache/slider/common/params/SliderActions.java b/slider-core/src/main/java/org/apache/slider/common/params/SliderActions.java
index aab7c98..e209cdc 100644
--- a/slider-core/src/main/java/org/apache/slider/common/params/SliderActions.java
+++ b/slider-core/src/main/java/org/apache/slider/common/params/SliderActions.java
@@ -51,6 +51,7 @@
String ACTION_RECONFIGURE = "reconfigure";
String ACTION_REGISTRY = "registry";
String ACTION_RESOLVE = "resolve";
+ String ACTION_RESOURCE = "resource";
String ACTION_STATUS = "status";
String ACTION_THAW = "start";
String ACTION_TOKENS = "tokens";
@@ -106,6 +107,7 @@
" Deprecated, use '" + ACTION_KEYTAB + " " + ClientArgs.ARG_INSTALL + "'.";
String DESCRIBE_ACTION_KEYTAB = "Manage a Kerberos keytab file (install, delete, list) in the sub-folder 'keytabs' of the user's Slider base directory";
String DESCRIBE_ACTION_DIAGNOSTIC = "Diagnose the configuration of the running slider application and slider client";
+ String DESCRIBE_ACTION_RESOURCE = "Manage a file (install, delete, list) in the sub-folder 'resources' of the user's Slider base directory";
}
diff --git a/slider-core/src/main/java/org/apache/slider/common/tools/CoreFileSystem.java b/slider-core/src/main/java/org/apache/slider/common/tools/CoreFileSystem.java
index 6a02367..0b0f1bc 100644
--- a/slider-core/src/main/java/org/apache/slider/common/tools/CoreFileSystem.java
+++ b/slider-core/src/main/java/org/apache/slider/common/tools/CoreFileSystem.java
@@ -23,6 +23,7 @@
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -64,6 +65,8 @@
private static final Logger
log = LoggerFactory.getLogger(CoreFileSystem.class);
+ private static final String UTF_8 = "UTF-8";
+
protected final FileSystem fileSystem;
protected final Configuration configuration;
@@ -209,6 +212,55 @@
}
/**
+ * Build up the path string for resource install location -no attempt to
+ * create the directory is made
+ *
+ * @return the path for resource
+ */
+ public Path buildResourcePath(String resourceFolder) {
+ Preconditions.checkNotNull(resourceFolder);
+ Path path = getBaseApplicationPath();
+ return new Path(path, SliderKeys.RESOURCE_DIR + "/" + resourceFolder);
+ }
+
+ /**
+ * Build up the path string for resource install location -no attempt to
+ * create the directory is made
+ *
+ * @return the path for resource
+ */
+ public Path buildResourcePath(String dirName, String fileName) {
+ Preconditions.checkNotNull(dirName);
+ Preconditions.checkNotNull(fileName);
+ Path path = getBaseApplicationPath();
+ return new Path(path, SliderKeys.RESOURCE_DIR + "/" + dirName + "/" + fileName);
+ }
+
+ /**
+ * Build up the path string for cluster resource install location -no
+ * attempt to create the directory is made
+ *
+ * @return the path for resource
+ */
+ public Path buildClusterResourcePath(String clusterName, String component) {
+ Preconditions.checkNotNull(clusterName);
+ Path path = buildClusterDirPath(clusterName);
+ return new Path(path, SliderKeys.RESOURCE_DIR + "/" + component);
+ }
+
+ /**
+ * Build up the path string for cluster resource install location -no
+ * attempt to create the directory is made
+ *
+ * @return the path for resource
+ */
+ public Path buildClusterResourcePath(String clusterName) {
+ Preconditions.checkNotNull(clusterName);
+ Path path = buildClusterDirPath(clusterName);
+ return new Path(path, SliderKeys.RESOURCE_DIR);
+ }
+
+ /**
* Create the Slider cluster path for a named cluster and all its subdirs
* This is a directory; a mkdirs() operation is executed
* to ensure that it is there.
@@ -713,6 +765,17 @@
fileSystem.setPermission(destPath, fp);
}
+ public void copyHdfsFileToLocal(Path hdfsPath, File destFile)
+ throws IOException {
+ if (hdfsPath == null || destFile == null) {
+ throw new IOException("Either hdfsPath or destPath is null");
+ }
+ log.info("Copying file {} to {}", hdfsPath.toUri(), destFile.toURI());
+
+ Path destPath = new Path(destFile.getPath());
+ fileSystem.copyToLocalFile(hdfsPath, destPath);
+ }
+
/**
* list entries in a filesystem directory
*
@@ -778,6 +841,14 @@
out.close();
}
+ public String cat(Path path) throws IOException {
+ FileStatus status = fileSystem.getFileStatus(path);
+ byte[] b = new byte[(int) status.getLen()];
+ FSDataInputStream in = fileSystem.open(path);
+ int count = in.read(b);
+ return new String(b, 0, count, UTF_8);
+ }
+
/**
* Create a path that must exist in the cluster fs
* @param uri uri to create
diff --git a/slider-core/src/main/java/org/apache/slider/core/conf/ConfTreeOperations.java b/slider-core/src/main/java/org/apache/slider/core/conf/ConfTreeOperations.java
index 9013edb..d376c72 100644
--- a/slider-core/src/main/java/org/apache/slider/core/conf/ConfTreeOperations.java
+++ b/slider-core/src/main/java/org/apache/slider/core/conf/ConfTreeOperations.java
@@ -345,7 +345,21 @@
confTreeSerDeser.fromFile(resource) );
return ops;
}
-
+
+ /**
+ * Load from a json String. The inner conf tree is the loaded data -unresolved
+ * @param json json string
+ * @return loaded value
+ * @throws IOException load failure
+ */
+ public static ConfTreeOperations fromString(String json) throws
+ IOException {
+ ConfTreeSerDeser confTreeSerDeser = new ConfTreeSerDeser();
+ ConfTreeOperations ops = new ConfTreeOperations(
+ confTreeSerDeser.fromJson(json) );
+ return ops;
+ }
+
/**
* Build from an existing instance -which is cloned via JSON ser/deser
* @param instance the source instance
@@ -431,6 +445,20 @@
}
/**
+ * Get a component opt as a boolean using {@link Boolean#valueOf(String)}.
+ *
+ * @param name component name
+ * @param option option name
+ * @param defVal default value
+ * @return parsed value
+ * @throws NumberFormatException if the role could not be parsed.
+ */
+ public boolean getComponentOptBool(String name, String option, boolean defVal) {
+ String val = getComponentOpt(name, option, Boolean.toString(defVal));
+ return Boolean.valueOf(val);
+ }
+
+ /**
* Set a component option, creating the component if necessary
* @param component component name
* @param option option name
diff --git a/slider-core/src/main/java/org/apache/slider/core/launch/AbstractLauncher.java b/slider-core/src/main/java/org/apache/slider/core/launch/AbstractLauncher.java
index 0348828..efc58b4 100644
--- a/slider-core/src/main/java/org/apache/slider/core/launch/AbstractLauncher.java
+++ b/slider-core/src/main/java/org/apache/slider/core/launch/AbstractLauncher.java
@@ -50,6 +50,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
/**
* Launcher of applications: base class
@@ -71,6 +72,7 @@
Records.newRecord(ContainerLaunchContext.class);
protected final List<String> commands = new ArrayList<>(20);
protected final Map<String, LocalResource> localResources = new HashMap<>();
+ protected final Map<String, String> mountPaths = new HashMap<>();
private final Map<String, ByteBuffer> serviceData = new HashMap<>();
// security
protected final Credentials credentials;
@@ -135,6 +137,11 @@
localResources.put(subpath, resource);
}
+ public void addLocalResource(String subpath, LocalResource resource, String mountPath) {
+ localResources.put(subpath, resource);
+ mountPaths.put(subpath, mountPath);
+ }
+
/**
* Add a set of local resources
* @param resourceMap map of name:resource to add
@@ -227,6 +234,16 @@
env.put("YARN_CONTAINER_RUNTIME_TYPE", "docker");
env.put("YARN_CONTAINER_RUNTIME_DOCKER_IMAGE", dockerImage);//if yarnDockerMode, then dockerImage is set
env.put("YARN_CONTAINER_RUNTIME_DOCKER_RUN_PRIVILEGED_CONTAINER", runPrivilegedContainer);
+ StringBuilder sb = new StringBuilder();
+ for (Entry<String,String> mount : mountPaths.entrySet()) {
+ if (sb.length() > 0) {
+ sb.append(",");
+ }
+ sb.append(mount.getKey());
+ sb.append(":");
+ sb.append(mount.getValue());
+ }
+ env.put("YARN_CONTAINER_RUNTIME_DOCKER_LOCAL_RESOURCE_MOUNTS", sb.toString());
log.info("yarn docker env var has been set {}", containerLaunchContext.getEnvironment().toString());
}
diff --git a/slider-core/src/main/java/org/apache/slider/core/persist/AppDefinitionPersister.java b/slider-core/src/main/java/org/apache/slider/core/persist/AppDefinitionPersister.java
index 8efaa5b..7fb3158 100644
--- a/slider-core/src/main/java/org/apache/slider/core/persist/AppDefinitionPersister.java
+++ b/slider-core/src/main/java/org/apache/slider/core/persist/AppDefinitionPersister.java
@@ -150,7 +150,11 @@
pkgSrcDir.mkdirs();
File destMetaInfo = new File(pkgSrcDir, "metainfo.json");
if (isFileUsed) {
- Files.copy(buildInfo.appMetaInfo, destMetaInfo);
+ if (buildInfo.appMetaInfo.getName().endsWith(".xml")) {
+ Files.copy(buildInfo.appMetaInfo, new File(pkgSrcDir, "metainfo.xml"));
+ } else {
+ Files.copy(buildInfo.appMetaInfo, destMetaInfo);
+ }
} else {
Files.write(
buildInfo.appMetaInfoJson.getBytes(Charset.forName("UTF-8")),
diff --git a/slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigFormat.java b/slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigFormat.java
index 12581d7..ddab606 100644
--- a/slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigFormat.java
+++ b/slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigFormat.java
@@ -23,8 +23,10 @@
JSON("json"),
PROPERTIES("properties"),
XML("xml"),
+ HADOOP_XML("hadoop-xml"),
ENV("env"),
-// YAML("yaml");
+ TEMPLATE("template"),
+ YAML("yaml"),
;
ConfigFormat(String suffix) {
this.suffix = suffix;
diff --git a/slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigUtils.java b/slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigUtils.java
new file mode 100644
index 0000000..2e1615b
--- /dev/null
+++ b/slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigUtils.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.slider.core.registry.docstore;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.slider.common.tools.SliderFileSystem;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class ConfigUtils {
+ public static final String TEMPLATE_FILE = "template.file";
+
+ public static String replaceProps(Map<String, String> config, String content) {
+ Map<String, String> tokens = new HashMap<>();
+ for (Entry<String, String> entry : config.entrySet()) {
+ tokens.put("${" + entry.getKey() + "}", entry.getValue());
+ tokens.put("{{" + entry.getKey() + "}}", entry.getValue());
+ }
+ String value = content;
+ for (Map.Entry<String,String> token : tokens.entrySet()) {
+ value = value.replaceAll(Pattern.quote(token.getKey()),
+ Matcher.quoteReplacement(token.getValue()));
+ }
+ return value;
+ }
+
+ public static Map<String, String> replacePropsInConfig(
+ Map<String, String> config, Map<String, String> env) {
+ Map<String, String> tokens = new HashMap<>();
+ for (Entry<String, String> entry : env.entrySet()) {
+ tokens.put("${" + entry.getKey() + "}", entry.getValue());
+ }
+ Map<String, String> newConfig = new HashMap<>();
+ for (Entry<String, String> entry : config.entrySet()) {
+ String value = entry.getValue();
+ for (Map.Entry<String,String> token : tokens.entrySet()) {
+ value = value.replaceAll(Pattern.quote(token.getKey()),
+ Matcher.quoteReplacement(token.getValue()));
+ }
+ newConfig.put(entry.getKey(), entry.getValue());
+ }
+ return newConfig;
+ }
+
+ public static void prepConfigForTemplateOutputter(ConfigFormat configFormat,
+ Map<String, String> config, SliderFileSystem fileSystem,
+ String clusterName, String fileName) throws IOException {
+ if (!configFormat.equals(ConfigFormat.TEMPLATE)) {
+ return;
+ }
+ Path templateFile = null;
+ if (config.containsKey(TEMPLATE_FILE)) {
+ templateFile = fileSystem.buildResourcePath(config.get(TEMPLATE_FILE));
+ if (!fileSystem.isFile(templateFile)) {
+ templateFile = fileSystem.buildResourcePath(clusterName,
+ config.get(TEMPLATE_FILE));
+ }
+ if (!fileSystem.isFile(templateFile)) {
+ throw new IOException("config specified template file " + config
+ .get(TEMPLATE_FILE) + " but " + templateFile + " doesn't exist");
+ }
+ }
+ if (templateFile == null && fileName != null) {
+ templateFile = fileSystem.buildResourcePath(fileName);
+ if (!fileSystem.isFile(templateFile)) {
+ templateFile = fileSystem.buildResourcePath(clusterName,
+ fileName);
+ }
+ }
+ if (fileSystem.isFile(templateFile)) {
+ config.put("content", fileSystem.cat(templateFile));
+ } else {
+ config.put("content", "");
+ }
+ }
+}
diff --git a/slider-core/src/main/java/org/apache/slider/core/registry/docstore/PublishedConfigurationOutputter.java b/slider-core/src/main/java/org/apache/slider/core/registry/docstore/PublishedConfigurationOutputter.java
index 15ac207..9bdcfcb 100644
--- a/slider-core/src/main/java/org/apache/slider/core/registry/docstore/PublishedConfigurationOutputter.java
+++ b/slider-core/src/main/java/org/apache/slider/core/registry/docstore/PublishedConfigurationOutputter.java
@@ -24,9 +24,11 @@
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.slider.common.tools.ConfigHelper;
+import org.yaml.snakeyaml.DumperOptions;
+import org.yaml.snakeyaml.DumperOptions.FlowStyle;
+import org.yaml.snakeyaml.Yaml;
import java.io.File;
-import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.StringWriter;
@@ -57,14 +59,7 @@
}
*/
public void save(File dest) throws IOException {
- FileOutputStream out = null;
- try {
- out = new FileOutputStream(dest);
- save(out);
- out.close();
- } finally {
- org.apache.hadoop.io.IOUtils.closeStream(out);
- }
+ FileUtils.writeStringToFile(dest, asString(), Charsets.UTF_8);
}
/**
@@ -89,12 +84,13 @@
* @param owner owning config
* @return the outputter
*/
-
+
public static PublishedConfigurationOutputter createOutputter(ConfigFormat format,
PublishedConfiguration owner) {
Preconditions.checkNotNull(owner);
switch (format) {
case XML:
+ case HADOOP_XML:
return new XmlOutputter(owner);
case PROPERTIES:
return new PropertiesOutputter(owner);
@@ -102,11 +98,15 @@
return new JsonOutputter(owner);
case ENV:
return new EnvOutputter(owner);
+ case TEMPLATE:
+ return new TemplateOutputter(owner);
+ case YAML:
+ return new YamlOutputter(owner);
default:
throw new RuntimeException("Unsupported format :" + format);
}
}
-
+
public static class XmlOutputter extends PublishedConfigurationOutputter {
@@ -131,7 +131,7 @@
return configuration;
}
}
-
+
public static class PropertiesOutputter extends PublishedConfigurationOutputter {
private final Properties properties;
@@ -146,15 +146,15 @@
properties.store(out, "");
}
-
+
public String asString() throws IOException {
StringWriter sw = new StringWriter();
properties.store(sw, "");
return sw.toString();
}
}
-
-
+
+
public static class JsonOutputter extends PublishedConfigurationOutputter {
public JsonOutputter(PublishedConfiguration owner) {
@@ -162,11 +162,6 @@
}
@Override
- public void save(File dest) throws IOException {
- FileUtils.writeStringToFile(dest, asString(), Charsets.UTF_8);
- }
-
- @Override
public String asString() throws IOException {
return owner.asJson();
}
@@ -180,19 +175,36 @@
}
@Override
- public void save(File dest) throws IOException {
- FileUtils.writeStringToFile(dest, asString(), Charsets.UTF_8);
- }
-
- @Override
public String asString() throws IOException {
if (!owner.entries.containsKey("content")) {
throw new IOException("Configuration has no content field and cannot " +
"be retrieved as type 'env'");
}
- return owner.entries.get("content");
+ String content = owner.entries.get("content");
+ return ConfigUtils.replaceProps(owner.entries, content);
}
}
+ public static class TemplateOutputter extends EnvOutputter {
+ public TemplateOutputter(PublishedConfiguration owner) {
+ super(owner);
+ }
+ }
+
+ public static class YamlOutputter extends PublishedConfigurationOutputter {
+
+ private final Yaml yaml;
+
+ public YamlOutputter(PublishedConfiguration owner) {
+ super(owner);
+ DumperOptions options = new DumperOptions();
+ options.setDefaultFlowStyle(FlowStyle.BLOCK);
+ yaml = new Yaml(options);
+ }
+
+ public String asString() throws IOException {
+ return yaml.dump(owner.entries);
+ }
+ }
}
diff --git a/slider-core/src/main/java/org/apache/slider/providers/AbstractClientProvider.java b/slider-core/src/main/java/org/apache/slider/providers/AbstractClientProvider.java
index fcab65e..510de5d 100644
--- a/slider-core/src/main/java/org/apache/slider/providers/AbstractClientProvider.java
+++ b/slider-core/src/main/java/org/apache/slider/providers/AbstractClientProvider.java
@@ -21,6 +21,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.registry.client.api.RegistryOperations;
import org.apache.slider.common.tools.SliderFileSystem;
import org.apache.slider.core.conf.AggregateConf;
import org.apache.slider.core.conf.ConfTreeOperations;
@@ -223,18 +224,22 @@
/**
* Process client operations for applications such as install, configure
* @param fileSystem
+ * @param registryOperations
+ * @param configuration
* @param operation
* @param clientInstallPath
* @param clientPackage
- * @param config
+ * @param clientConfig
* @param name
* @throws SliderException
*/
public void processClientOperation(SliderFileSystem fileSystem,
+ RegistryOperations registryOperations,
+ Configuration configuration,
String operation,
File clientInstallPath,
File clientPackage,
- JSONObject config,
+ JSONObject clientConfig,
String name)
throws SliderException {
throw new SliderException("Provider does not support client operations.");
diff --git a/slider-core/src/main/java/org/apache/slider/providers/agent/AgentClientProvider.java b/slider-core/src/main/java/org/apache/slider/providers/agent/AgentClientProvider.java
index f3dcd1d..3eef0b0 100644
--- a/slider-core/src/main/java/org/apache/slider/providers/agent/AgentClientProvider.java
+++ b/slider-core/src/main/java/org/apache/slider/providers/agent/AgentClientProvider.java
@@ -22,13 +22,16 @@
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.registry.client.api.RegistryOperations;
import org.apache.hadoop.registry.client.binding.RegistryUtils;
import org.apache.slider.api.InternalKeys;
import org.apache.slider.api.ResourceKeys;
+import org.apache.slider.client.ClientUtils;
import org.apache.slider.common.SliderKeys;
import org.apache.slider.common.tools.SliderFileSystem;
import org.apache.slider.common.tools.SliderUtils;
@@ -38,13 +41,18 @@
import org.apache.slider.core.exceptions.BadConfigException;
import org.apache.slider.core.exceptions.SliderException;
import org.apache.slider.core.launch.AbstractLauncher;
+import org.apache.slider.core.registry.docstore.PublishedConfiguration;
import org.apache.slider.providers.AbstractClientProvider;
import org.apache.slider.providers.ProviderRole;
import org.apache.slider.providers.ProviderUtils;
import org.apache.slider.providers.agent.application.metadata.Application;
import org.apache.slider.providers.agent.application.metadata.Component;
+import org.apache.slider.providers.agent.application.metadata.ConfigFile;
import org.apache.slider.providers.agent.application.metadata.Metainfo;
import org.apache.slider.providers.agent.application.metadata.MetainfoParser;
+import org.apache.slider.providers.agent.application.metadata.OSPackage;
+import org.apache.slider.providers.agent.application.metadata.OSSpecific;
+import org.apache.slider.providers.agent.application.metadata.Package;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
@@ -53,7 +61,6 @@
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
@@ -291,6 +298,8 @@
@Override
public void processClientOperation(SliderFileSystem fileSystem,
+ RegistryOperations rops,
+ Configuration configuration,
String operation,
File clientInstallPath,
File appPackage,
@@ -319,51 +328,25 @@
{
ZipEntry zipEntry = zipInputStream.getNextEntry();
while (zipEntry != null) {
- if ("metainfo.xml".equals(zipEntry.getName())) {
- int size = (int) zipEntry.getSize();
- if (size != -1) {
- log.info("Reading {} of size {}", zipEntry.getName(),
- zipEntry.getSize());
- byte[] content = new byte[size];
- int offset = 0;
- while (offset < size) {
- offset += zipInputStream.read(content, offset, size - offset);
- }
- metaInfo = new MetainfoParser().fromXmlStream(new ByteArrayInputStream(content));
- }
- } else if ("metainfo.json".equals(zipEntry.getName())) {
- int size = (int) zipEntry.getSize();
- if (size != -1) {
- log.info("Reading {} of size {}", zipEntry.getName(),
- zipEntry.getSize());
- byte[] content = new byte[size];
- int offset = 0;
- while (offset < size) {
- offset += zipInputStream.read(content, offset, size - offset);
- }
- metaInfo = new MetainfoParser().fromJsonStream(new ByteArrayInputStream(content));
- }
- } else if ("clientInstallConfig-default.json".equals(zipEntry.getName())) {
- int size = (int) zipEntry.getSize();
- if (size != -1) {
- log.info("Reading {} of size {}", zipEntry.getName(),
- zipEntry.getSize());
- byte[] content = new byte[size];
- int offset = 0;
- while (offset < size) {
- offset += zipInputStream.read(content, offset, size - offset);
- }
+ log.info("Processing {}", zipEntry.getName());
+ String filePath = appPkgDir + File.separator + zipEntry.getName();
+ if (!zipEntry.isDirectory()) {
+ log.info("Extracting file {}", filePath);
+ extractFile(zipInputStream, filePath);
+
+ if ("metainfo.xml".equals(zipEntry.getName())) {
+ metaInfo = new MetainfoParser().fromXmlStream(new FileInputStream(filePath));
+ } else if ("metainfo.json".equals(zipEntry.getName())) {
+ metaInfo = new MetainfoParser().fromJsonStream(new FileInputStream(filePath));
+ } else if ("clientInstallConfig-default.json".equals(zipEntry.getName())) {
try {
- defaultConfig = new JSONObject(new String(content, Charset.defaultCharset()));
+ defaultConfig = new JSONObject(FileUtils.readFileToString(new File(filePath), Charset.defaultCharset()));
} catch (JSONException jex) {
throw new SliderException("Unable to read default client config.", jex);
}
}
- }
- String filePath = appPkgDir + File.separator + zipEntry.getName();
- if (!zipEntry.isDirectory()) {
- extractFile(zipInputStream, filePath);
} else {
+ log.info("Creating dir {}", filePath);
File dir = new File(filePath);
dir.mkdir();
}
@@ -379,34 +362,107 @@
throw new BadConfigException(E_COULD_NOT_READ_METAINFO);
}
- expandAgentTar(agentPkgDir);
-
- JSONObject commandJson = getCommandJson(defaultConfig, config, metaInfo, clientInstallPath, name);
- FileWriter file = new FileWriter(new File(cmdDir, "command.json"));
- try {
- file.write(commandJson.toString());
-
- } catch (IOException e) {
- e.printStackTrace();
- } finally {
- file.flush();
- file.close();
- }
-
String client_script = null;
+ String clientComponent = null;
for (Component component : metaInfo.getApplication().getComponents()) {
if (component.getCategory().equals("CLIENT")) {
- client_script = component.getCommandScript().getScript();
- log.info("Installing CLIENT {} using script {}", component.getName(), client_script);
+ clientComponent = component.getName();
+ if (component.getCommandScript() != null) {
+ client_script = component.getCommandScript().getScript();
+ }
break;
}
}
if (SliderUtils.isUnset(client_script)) {
- throw new SliderException("No valid CLIENT component found. Aborting install.");
- }
+ log.info("Installing CLIENT without script");
+ List<Package> packages = metaInfo.getApplication().getPackages();
+ if (packages != null && packages.size() > 0) {
+ // retrieve package resources from HDFS and extract
+ for (Package pkg : packages) {
+ Path pkgPath = fileSystem.buildResourcePath(pkg.getName());
+ if (!fileSystem.isFile(pkgPath) && name != null) {
+ pkgPath = fileSystem.buildResourcePath(name, pkg.getName());
+ }
+ if (!fileSystem.isFile(pkgPath)) {
+ throw new IOException("Package doesn't exist as a resource: " +
+ pkg.getName());
+ }
+ if ("archive".equals(pkg.getType())) {
+ File pkgFile = new File(tmpDir, pkg.getName());
+ fileSystem.copyHdfsFileToLocal(pkgPath, pkgFile);
+ expandTar(pkgFile, clientInstallPath);
+ } else {
+ File pkgFile = new File(clientInstallPath, pkg.getName());
+ fileSystem.copyHdfsFileToLocal(pkgPath, pkgFile);
+ }
+ }
+ } else {
+ // extract tarball from app def
+ for (OSSpecific osSpecific : metaInfo.getApplication()
+ .getOSSpecifics()) {
+ for (OSPackage pkg : osSpecific.getPackages()) {
+ if ("tarball".equals(pkg.getType())) {
+ File pkgFile = new File(appPkgDir, pkg.getName());
+ expandTar(pkgFile, clientInstallPath);
+ }
+ }
+ }
+ }
+ if (name == null) {
+ log.warn("Conf files not being generated because no app name was " +
+ "provided");
+ return;
+ }
+ File confInstallDir;
+ String clientRoot = null;
+ if (defaultConfig != null) {
+ try {
+ clientRoot = defaultConfig.getJSONObject("global")
+ .getString(AgentKeys.APP_CLIENT_ROOT);
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+ }
+ if (config != null) {
+ try {
+ clientRoot = config.getJSONObject("global")
+ .getString(AgentKeys.APP_CLIENT_ROOT);
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+ }
+ if (clientRoot == null) {
+ confInstallDir = clientInstallPath;
+ } else {
+ confInstallDir = new File(new File(clientInstallPath, clientRoot), "conf");
+ if (!confInstallDir.exists()) {
+ confInstallDir.mkdirs();
+ }
+ }
+ String user = RegistryUtils.currentUser();
+ for (ConfigFile configFile : metaInfo.getComponentConfigFiles(clientComponent)) {
+ retrieveConfigFile(rops, configuration, configFile, name, user,
+ confInstallDir);
+ }
+ } else {
+ log.info("Installing CLIENT using script {}", client_script);
+ expandAgentTar(agentPkgDir);
- runCommand(appPkgDir, agentPkgDir, cmdDir, client_script);
+ JSONObject commandJson = getCommandJson(defaultConfig, config, metaInfo, clientInstallPath, name);
+ FileWriter file = new FileWriter(new File(cmdDir, "command.json"));
+ try {
+ file.write(commandJson.toString());
+
+ } catch (IOException e) {
+ e.printStackTrace();
+ } finally {
+ file.flush();
+ file.close();
+ }
+
+ runCommand(appPkgDir, agentPkgDir, cmdDir, client_script);
+ }
} catch (IOException ioex) {
log.warn("Error while executing INSTALL command {}", ioex.getMessage());
@@ -481,6 +537,11 @@
String libDirProp =
System.getProperty(SliderKeys.PROPERTY_LIB_DIR);
File tarFile = new File(libDirProp, SliderKeys.AGENT_TAR);
+ expandTar(tarFile, agentPkgDir);
+ }
+
+ private void expandTar(File tarFile, File destDir) throws IOException {
+ log.info("Expanding tar {} to {}", tarFile, destDir);
TarArchiveInputStream tarIn = new TarArchiveInputStream(
new GzipCompressorInputStream(
new BufferedInputStream(
@@ -491,11 +552,14 @@
try {
TarArchiveEntry tarEntry = tarIn.getNextTarEntry();
while (tarEntry != null) {
- File destPath = new File(agentPkgDir, tarEntry.getName());
+ File destPath = new File(destDir, tarEntry.getName());
+ File parent = destPath.getParentFile();
+ if (!parent.exists()) {
+ parent.mkdirs();
+ }
if (tarEntry.isDirectory()) {
destPath.mkdirs();
} else {
- destPath.createNewFile();
byte[] byteToRead = new byte[1024];
BufferedOutputStream buffOut =
new BufferedOutputStream(new FileOutputStream(destPath));
@@ -508,6 +572,9 @@
buffOut.close();
}
}
+ if ((tarEntry.getMode() & 0100) != 0) {
+ destPath.setExecutable(true);
+ }
tarEntry = tarIn.getNextTarEntry();
}
} finally {
@@ -515,6 +582,17 @@
}
}
+ private void retrieveConfigFile(RegistryOperations rops,
+ Configuration configuration, ConfigFile configFile, String name,
+ String user, File destDir) throws IOException, SliderException {
+ log.info("Retrieving config {} to {}", configFile.getDictionaryName(),
+ destDir);
+ PublishedConfiguration published = ClientUtils.getConfigFromRegistry(rops,
+ configuration, configFile.getDictionaryName(), name, user, true);
+ ClientUtils.saveOrReturnConfig(published, configFile.getType(),
+ destDir, configFile.getFileName());
+ }
+
protected JSONObject getCommandJson(JSONObject defaultConfig,
JSONObject inputConfig,
Metainfo metainfo,
diff --git a/slider-core/src/main/java/org/apache/slider/providers/agent/AgentKeys.java b/slider-core/src/main/java/org/apache/slider/providers/agent/AgentKeys.java
index b027939..01a3f1a 100644
--- a/slider-core/src/main/java/org/apache/slider/providers/agent/AgentKeys.java
+++ b/slider-core/src/main/java/org/apache/slider/providers/agent/AgentKeys.java
@@ -48,6 +48,7 @@
*/
String APP_HOME = "app.home";
String APP_ROOT = "site.global.app_root";
+ String APP_CLIENT_ROOT = "client_root";
/**
* Runas user of the application
*/
@@ -77,11 +78,16 @@
String APP_RESOURCES = "application.resources";
String APP_RESOURCES_DIR = "app/resources";
+ String APP_CONF_DIR = "app/conf";
+
String AGENT_INSTALL_DIR = "infra/agent";
String APP_DEFINITION_DIR = "app/definition";
String ADDON_DEFINITION_DIR = "addon/definition";
String AGENT_CONFIG_FILE = "infra/conf/agent.ini";
String AGENT_VERSION_FILE = "infra/version";
+ String APP_PACKAGES_DIR = "app/packages";
+ String PER_COMPONENT = "per.component";
+ String PER_GROUP = "per.group";
String JAVA_HOME = "java_home";
String PACKAGE_LIST = "package_list";
@@ -97,6 +103,7 @@
String CERT_FILE_LOCALIZATION_PATH = INFRA_RUN_SECURITY_DIR + "ca.crt";
String KEY_CONTAINER_LAUNCH_DELAY = "container.launch.delay.sec";
String TEST_RELAX_VERIFICATION = "test.relax.validation";
+ String AM_CONFIG_GENERATION = "am.config.generation";
}
diff --git a/slider-core/src/main/java/org/apache/slider/providers/agent/AgentProviderService.java b/slider-core/src/main/java/org/apache/slider/providers/agent/AgentProviderService.java
index f20757a..bc04220 100644
--- a/slider-core/src/main/java/org/apache/slider/providers/agent/AgentProviderService.java
+++ b/slider-core/src/main/java/org/apache/slider/providers/agent/AgentProviderService.java
@@ -60,8 +60,11 @@
import org.apache.slider.core.exceptions.SliderException;
import org.apache.slider.core.launch.CommandLineBuilder;
import org.apache.slider.core.launch.ContainerLauncher;
+import org.apache.slider.core.registry.docstore.ConfigFormat;
+import org.apache.slider.core.registry.docstore.ConfigUtils;
import org.apache.slider.core.registry.docstore.ExportEntry;
import org.apache.slider.core.registry.docstore.PublishedConfiguration;
+import org.apache.slider.core.registry.docstore.PublishedConfigurationOutputter;
import org.apache.slider.core.registry.docstore.PublishedExports;
import org.apache.slider.core.registry.info.CustomRegistryConstants;
import org.apache.slider.providers.AbstractProviderService;
@@ -124,7 +127,6 @@
import java.util.List;
import java.util.Locale;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Scanner;
import java.util.Set;
import java.util.TreeMap;
@@ -132,7 +134,6 @@
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.regex.Pattern;
import static org.apache.slider.server.appmaster.web.rest.RestPaths.SLIDER_PATH_AGENTS;
@@ -169,6 +170,8 @@
private AgentClientProvider clientProvider;
private AtomicInteger taskId = new AtomicInteger(0);
private volatile Metainfo metaInfo = null;
+ private AggregateConf instanceDefinition = null;
+ private SliderFileSystem fileSystem = null;
private Map<String, DefaultConfig> defaultConfigs = null;
private ComponentCommandOrder commandOrder = null;
private HeartbeatMonitor monitor;
@@ -281,6 +284,8 @@
if (metaInfo == null) {
synchronized (syncLock) {
if (metaInfo == null) {
+ this.instanceDefinition = instanceDefinition;
+ this.fileSystem = fileSystem;
readAndSetHeartbeatMonitoringInterval(instanceDefinition);
initializeAgentDebugCommands(instanceDefinition);
@@ -435,6 +440,26 @@
LocalResourceType.ARCHIVE);
launcher.addLocalResource(AgentKeys.APP_DEFINITION_DIR, appDefRes);
+ for (Package pkg : getMetaInfo().getApplication().getPackages()) {
+ Path pkgPath = fileSystem.buildResourcePath(pkg.getName());
+ if (!fileSystem.isFile(pkgPath)) {
+ pkgPath = fileSystem.buildResourcePath(getClusterName(),
+ pkg.getName());
+ }
+ if (!fileSystem.isFile(pkgPath)) {
+ throw new IOException("Package doesn't exist as a resource: " +
+ pkg.getName());
+ }
+ log.info("Adding resource {}", pkg.getName());
+ LocalResourceType type = LocalResourceType.FILE;
+ if ("archive".equals(pkg.getType())) {
+ type = LocalResourceType.ARCHIVE;
+ }
+ LocalResource packageResource = fileSystem.createAmResource(
+ pkgPath, type);
+ launcher.addLocalResource(AgentKeys.APP_PACKAGES_DIR, packageResource);
+ }
+
String agentConf = instanceDefinition.getAppConfOperations().
getGlobalOptions().getOption(AgentKeys.AGENT_CONF, "");
if (SliderUtils.isSet(agentConf)) {
@@ -476,6 +501,15 @@
generatedConfPath,
SliderKeys.PROPAGATED_CONF_DIR_NAME));
+ if (appComponent.getOptionBool(AgentKeys.AM_CONFIG_GENERATION, false)) {
+ // build and localize configuration files
+ Map<String, Map<String, String>> configurations =
+ buildCommandConfigurations(instanceDefinition.getAppConfOperations(),
+ container.getId().toString(), roleName, roleGroup);
+ localizeConfigFiles(launcher, roleName, roleGroup, getMetaInfo(),
+ configurations, launcher.getEnv(), fileSystem);
+ }
+
String label = getContainerLabel(container, roleName, roleGroup);
CommandLineBuilder operation = new CommandLineBuilder();
@@ -646,11 +680,27 @@
private Path uploadSecurityResource(File resource, SliderFileSystem fileSystem)
throws IOException {
Path certsDir = fileSystem.buildClusterSecurityDirPath(getClusterName());
- if (!fileSystem.getFileSystem().exists(certsDir)) {
- fileSystem.getFileSystem().mkdirs(certsDir,
+ return uploadResource(resource, fileSystem, certsDir);
+ }
+
+ private Path uploadResource(File resource, SliderFileSystem fileSystem,
+ String roleName) throws IOException {
+ Path dir;
+ if (roleName == null) {
+ dir = fileSystem.buildClusterResourcePath(getClusterName());
+ } else {
+ dir = fileSystem.buildClusterResourcePath(getClusterName(), roleName);
+ }
+ return uploadResource(resource, fileSystem, dir);
+ }
+
+ private static synchronized Path uploadResource(File resource,
+ SliderFileSystem fileSystem, Path parentDir) throws IOException {
+ if (!fileSystem.getFileSystem().exists(parentDir)) {
+ fileSystem.getFileSystem().mkdirs(parentDir,
new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
}
- Path destPath = new Path(certsDir, resource.getName());
+ Path destPath = new Path(parentDir, resource.getName());
if (!fileSystem.getFileSystem().exists(destPath)) {
FSDataOutputStream os = fileSystem.getFileSystem().create(destPath);
byte[] contents = FileUtils.readFileToByteArray(resource);
@@ -659,6 +709,9 @@
os.flush();
os.close();
log.info("Uploaded {} to localization path {}", resource, destPath);
+ } else {
+ log.info("Resource {} already existed at localization path {}", resource,
+ destPath);
}
while (!fileSystem.getFileSystem().exists(destPath)) {
@@ -718,6 +771,69 @@
}
}
+ private void createConfigFile(SliderFileSystem fileSystem, File file,
+ ConfigFile configFile, Map<String, String> config)
+ throws IOException {
+ ConfigFormat configFormat = ConfigFormat.resolve(configFile.getType());
+ log.info("Writing {} file {}", configFormat, file);
+
+ ConfigUtils.prepConfigForTemplateOutputter(configFormat, config,
+ fileSystem, getClusterName(), file.getName());
+ PublishedConfiguration publishedConfiguration =
+ new PublishedConfiguration(configFile.getDictionaryName(),
+ config.entrySet());
+ PublishedConfigurationOutputter configurationOutputter =
+ PublishedConfigurationOutputter.createOutputter(configFormat,
+ publishedConfiguration);
+ configurationOutputter.save(file);
+ }
+
+ @VisibleForTesting
+ protected void localizeConfigFiles(ContainerLauncher launcher,
+ String roleName, String roleGroup,
+ Metainfo metainfo,
+ Map<String, Map<String, String>> configs,
+ MapOperations env,
+ SliderFileSystem fileSystem)
+ throws IOException {
+ for (ConfigFile configFile : metainfo.getComponentConfigFiles(roleGroup)) {
+ Map<String, String> config = ConfigUtils.replacePropsInConfig(
+ configs.get(configFile.getDictionaryName()), env.options);
+ String fileName = ConfigUtils.replaceProps(config,
+ configFile.getFileName());
+ File localFile = new File(SliderKeys.RESOURCE_DIR);
+ if (!localFile.exists()) {
+ localFile.mkdir();
+ }
+ localFile = new File(localFile, new File(fileName).getName());
+
+ String folder = null;
+ if ("true".equals(config.get(PER_COMPONENT))) {
+ folder = roleName;
+ } else if ("true".equals(config.get(PER_GROUP))) {
+ folder = roleGroup;
+ }
+
+ log.info("Localizing {} configs to config file {} (destination {}) " +
+ "based on {} configs", config.size(), localFile, fileName,
+ configFile.getDictionaryName());
+ createConfigFile(fileSystem, localFile, configFile, config);
+ Path destPath = uploadResource(localFile, fileSystem, folder);
+ LocalResource configResource = fileSystem.createAmResource(destPath,
+ LocalResourceType.FILE);
+
+ File destFile = new File(fileName);
+ if (destFile.isAbsolute()) {
+ launcher.addLocalResource(
+ SliderKeys.RESOURCE_DIR + "/" + destFile.getName(),
+ configResource, fileName);
+ } else {
+ launcher.addLocalResource(AgentKeys.APP_CONF_DIR + "/" + fileName,
+ configResource);
+ }
+ }
+ }
+
/**
* build the zookeeper registry path.
*
@@ -1242,6 +1358,69 @@
} catch (URISyntaxException e) {
throw new IOException(e);
}
+
+ // identify client component
+ Component client = null;
+ for (Component component : getMetaInfo().getApplication().getComponents()) {
+ if (component != null && component.getCategory().equals("CLIENT")) {
+ client = component;
+ break;
+ }
+ }
+ if (client == null) {
+ log.info("No client component specified, not publishing client configs");
+ return;
+ }
+
+ // register AM-generated client configs
+ ConfTreeOperations appConf = instanceDefinition.getAppConfOperations();
+ MapOperations clientOperations = appConf.getOrAddComponent(client.getName());
+ appConf.resolve();
+ if (!clientOperations.getOptionBool(AgentKeys.AM_CONFIG_GENERATION,
+ false)) {
+ log.info("AM config generation is false, not publishing client configs");
+ return;
+ }
+
+ // build and localize configuration files
+ Map<String, Map<String, String>> configurations = new TreeMap<String, Map<String, String>>();
+ Map<String, String> tokens = null;
+ try {
+ tokens = getStandardTokenMap(appConf, client.getName(), client.getName());
+ } catch (SliderException e) {
+ throw new IOException(e);
+ }
+
+ for (ConfigFile configFile : getMetaInfo()
+ .getComponentConfigFiles(client.getName())) {
+ addNamedConfiguration(configFile.getDictionaryName(),
+ appConf.getGlobalOptions().options, configurations, tokens, null,
+ client.getName());
+ if (appConf.getComponent(client.getName()) != null) {
+ addNamedConfiguration(configFile.getDictionaryName(),
+ appConf.getComponent(client.getName()).options, configurations,
+ tokens, null, client.getName());
+ }
+ }
+
+ //do a final replacement of re-used configs
+ dereferenceAllConfigs(configurations);
+
+ for (ConfigFile configFile : getMetaInfo()
+ .getComponentConfigFiles(client.getName())) {
+ ConfigFormat configFormat = ConfigFormat.resolve(configFile.getType());
+
+ Map<String, String> config = configurations.get(configFile.getDictionaryName());
+ ConfigUtils.prepConfigForTemplateOutputter(configFormat, config,
+ fileSystem, getClusterName(),
+ new File(configFile.getFileName()).getName());
+ PublishedConfiguration publishedConfiguration =
+ new PublishedConfiguration(configFile.getDictionaryName(),
+ config.entrySet());
+ getAmState().getPublishedSliderConfigurations().put(
+ configFile.getDictionaryName(), publishedConfiguration);
+ log.info("Publishing AM configuration {}", configFile.getDictionaryName());
+ }
}
@Override
@@ -1585,7 +1764,9 @@
if (status.getConfigs() != null) {
Application application = getMetaInfo().getApplication();
- if (canAnyMasterPublishConfig() == false || canPublishConfig(componentGroup)) {
+ if ((!canAnyMasterPublishConfig() || canPublishConfig(componentGroup)) &&
+ !instanceDefinition.getAppConfOperations().getComponentOptBool(
+ componentGroup, AgentKeys.AM_CONFIG_GENERATION, false)) {
// If no Master can explicitly publish then publish if its a master
// Otherwise, wait till the master that can publish is ready
@@ -1709,7 +1890,11 @@
simpleEntries.put(entry.getKey(), entry.getValue().get(0).getValue());
}
}
- publishApplicationInstanceData(groupName, groupName, simpleEntries.entrySet());
+ if (!instanceDefinition.getAppConfOperations().getComponentOptBool(
+ groupName, AgentKeys.AM_CONFIG_GENERATION, false)) {
+ publishApplicationInstanceData(groupName, groupName,
+ simpleEntries.entrySet());
+ }
PublishedExports exports = new PublishedExports(groupName);
exports.setUpdated(new Date().getTime());
@@ -2036,7 +2221,7 @@
cmd.setConfigurations(configurations);
Map<String, Map<String, String>> componentConfigurations = buildComponentConfigurations(appConf);
cmd.setComponentConfigurations(componentConfigurations);
-
+
if (SliderUtils.isSet(scriptPath)) {
cmd.setCommandParams(commandParametersSet(scriptPath, timeout, false));
} else {
@@ -2154,10 +2339,10 @@
List<String> packages = new ArrayList<>();
if (application != null) {
if (application.getPackages().size() > 0) {
- List<Package> appPackages = application.getPackages();
- for (Package appPackage : appPackages) {
- packages.add(String.format(pkgFormatString, appPackage.getType(), appPackage.getName()));
- }
+ // no-op if there are packages that are not OS-specific, as these
+ // will be localized by AM rather than the Agent
+ // this should be backwards compatible, as there was previously an
+ // XML parsing bug that ensured non-OS-specific packages did not exist
} else {
List<OSSpecific> osSpecifics = application.getOSSpecifics();
if (osSpecifics != null && osSpecifics.size() > 0) {
@@ -2821,14 +3006,41 @@
}
}
+ boolean finished = false;
+ while (!finished) {
+ finished = true;
+ for (Map.Entry<String, String> entry : allConfigs.entrySet()) {
+ String configValue = entry.getValue();
+ for (Map.Entry<String, String> lookUpEntry : allConfigs.entrySet()) {
+ String lookUpValue = lookUpEntry.getValue();
+ if (lookUpValue.contains("${@//site/")) {
+ continue;
+ }
+ String lookUpKey = lookUpEntry.getKey();
+ if (configValue != null && configValue.contains(lookUpKey)) {
+ configValue = configValue.replace(lookUpKey, lookUpValue);
+ }
+ }
+ if (!configValue.equals(entry.getValue())) {
+ finished = false;
+ allConfigs.put(entry.getKey(), configValue);
+ }
+ }
+ }
+
for (String configType : configurations.keySet()) {
Map<String, String> configBucket = configurations.get(configType);
for (Map.Entry<String, String> entry: configBucket.entrySet()) {
String configName = entry.getKey();
String configValue = entry.getValue();
- for (String lookUpKey : allConfigs.keySet()) {
+ for (Map.Entry<String, String> lookUpEntry : allConfigs.entrySet()) {
+ String lookUpValue = lookUpEntry.getValue();
+ if (lookUpValue.contains("${@//site/")) {
+ continue;
+ }
+ String lookUpKey = lookUpEntry.getKey();
if (configValue != null && configValue.contains(lookUpKey)) {
- configValue = configValue.replace(lookUpKey, allConfigs.get(lookUpKey));
+ configValue = configValue.replace(lookUpKey, lookUpValue);
}
}
configBucket.put(configName, configValue);
@@ -2974,6 +3186,7 @@
config.put("app_log_dir", "${AGENT_LOG_ROOT}");
config.put("app_pid_dir", "${AGENT_WORK_ROOT}/app/run");
config.put("app_install_dir", "${AGENT_WORK_ROOT}/app/install");
+ config.put("app_conf_dir", "${AGENT_WORK_ROOT}/" + AgentKeys.APP_CONF_DIR);
config.put("app_input_conf_dir", "${AGENT_WORK_ROOT}/" + SliderKeys.PROPAGATED_CONF_DIR_NAME);
config.put("app_container_id", containerId);
config.put("app_container_tag", tags.getTag(roleName, containerId));
diff --git a/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/AbstractComponent.java b/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/AbstractComponent.java
index 1b63b58..b6ae4de 100644
--- a/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/AbstractComponent.java
+++ b/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/AbstractComponent.java
@@ -65,6 +65,10 @@
this.commands = commands;
}
+ public void addCommand(ComponentCommand command) {
+ commands.add(command);
+ }
+
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("{");
diff --git a/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Application.java b/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Application.java
index 63546a4..5556c7f 100644
--- a/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Application.java
+++ b/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Application.java
@@ -116,6 +116,10 @@
return commandOrders;
}
+ public void addPackage(Package pkg) {
+ packages.add(pkg);
+ }
+
@JsonProperty("packages")
public List<Package> getPackages() {
return packages;
diff --git a/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Component.java b/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Component.java
index 3f23455..78bb8c1 100644
--- a/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Component.java
+++ b/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Component.java
@@ -38,7 +38,8 @@
String type = TYPE_STANDARD;
List<ComponentExport> componentExports = new ArrayList<>();
List<DockerContainer> dockerContainers = new ArrayList<>();
-
+ List<ConfigFile> configFiles = new ArrayList<>();
+
public Component() {
}
@@ -155,6 +156,15 @@
return Boolean.parseBoolean(this.autoStartOnFailure);
}
+ public void addConfigFile(ConfigFile configFile) {
+ this.configFiles.add(configFile);
+ }
+
+ @JsonProperty("configFiles")
+ public List<ConfigFile> getConfigFiles() {
+ return configFiles;
+ }
+
@Override
public String toString() {
final StringBuilder sb =
diff --git a/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Metainfo.java b/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Metainfo.java
index 036d98e..10c497f 100644
--- a/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Metainfo.java
+++ b/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/Metainfo.java
@@ -21,6 +21,9 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.util.ArrayList;
+import java.util.List;
+
/**
* Application metainfo uber class
*/
@@ -71,6 +74,16 @@
return null;
}
+ public List<ConfigFile> getComponentConfigFiles(String roleGroup) {
+ List<ConfigFile> componentConfigFiles = new ArrayList<>();
+ componentConfigFiles.addAll(application.getConfigFiles());
+ Component component = getApplicationComponent(roleGroup);
+ if (component != null) {
+ componentConfigFiles.addAll(component.getConfigFiles());
+ }
+ return componentConfigFiles;
+ }
+
public void validate() throws SliderException {
if (!VERSION_TWO_ONE.equals(schemaVersion) &&
!VERSION_TWO_ZERO.equals(schemaVersion)) {
diff --git a/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/MetainfoParser.java b/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/MetainfoParser.java
index a6f0e9d..8b520eb 100644
--- a/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/MetainfoParser.java
+++ b/slider-core/src/main/java/org/apache/slider/providers/agent/application/metadata/MetainfoParser.java
@@ -67,14 +67,25 @@
digester.addBeanPropertySetter("*/commandScript/timeout");
digester.addSetNext("*/commandScript", "addCommandScript");
+ digester.addObjectCreate("*/command", ComponentCommand.class);
+ digester.addBeanPropertySetter("*/command/exec");
+ digester.addBeanPropertySetter("*/command/name");
+ digester.addBeanPropertySetter("*/command/type");
+ digester.addSetNext("*/command", "addCommand");
+
digester.addObjectCreate("*/osSpecific", OSSpecific.class);
digester.addBeanPropertySetter("*/osSpecific/osType");
- digester.addObjectCreate("*/package", OSPackage.class);
- digester.addBeanPropertySetter("*/package/type");
- digester.addBeanPropertySetter("*/package/name");
- digester.addSetNext("*/package", "addOSPackage");
+ digester.addObjectCreate("*/osSpecific/packages/package", OSPackage.class);
+ digester.addBeanPropertySetter("*/osSpecific/packages/package/type");
+ digester.addBeanPropertySetter("*/osSpecific/packages/package/name");
+ digester.addSetNext("*/osSpecific/packages/package", "addOSPackage");
digester.addSetNext("*/osSpecific", "addOSSpecific");
+ digester.addObjectCreate("*/application/packages/package", Package.class);
+ digester.addBeanPropertySetter("*/application/packages/package/type");
+ digester.addBeanPropertySetter("*/application/packages/package/name");
+ digester.addSetNext("*/application/packages/package", "addPackage");
+
digester.addObjectCreate("*/configFile", ConfigFile.class);
digester.addBeanPropertySetter("*/configFile/type");
digester.addBeanPropertySetter("*/configFile/fileName");
diff --git a/slider-core/src/test/app_packages/test_am_config/appConfig.json b/slider-core/src/test/app_packages/test_am_config/appConfig.json
new file mode 100644
index 0000000..9ede591
--- /dev/null
+++ b/slider-core/src/test/app_packages/test_am_config/appConfig.json
@@ -0,0 +1,31 @@
+{
+ "schema": "http://example.org/specification/v2.0.0",
+ "metadata": {
+ },
+ "global": {
+ "am.config.generation": "true",
+ "site.global.application_id": "DateLogger",
+ "site.global.app_version": "1.0.0",
+ "site.global.app_root": "${AGENT_WORK_ROOT}/app/packages/command-logger",
+
+ "site.cl-site.logfile.location": "${AGENT_WORK_ROOT}/app/install/command-logger-app/operations.log",
+ "site.cl-site.datetime.format": "%A, %d. %B %Y %I:%M%p",
+ "site.cl-site.pattern.for.test.to.verify": "verify this pattern",
+
+ "site.client-json.clientkey": "clientval",
+ "site.test-json.jsonkey": "val1",
+ "site.test-xml.xmlkey": "val2",
+ "site.test-hadoop-xml.xmlkey": "val3",
+ "site.test-properties.propkey": "val4",
+ "site.test-yaml.yamlkey": "val5",
+ "site.test-env.content": "test ${envkey1} {{envkey2}} content",
+ "site.test-env.envkey1": "envval1",
+ "site.test-env.envkey2": "envval2",
+ "site.test-template.templatekey1": "templateval1",
+ "site.test-template.templatekey2": "templateval2"
+ },
+ "components": {
+ "COMMAND_LOGGER": {
+ }
+ }
+}
diff --git a/slider-core/src/test/app_packages/test_am_config/metainfo.json b/slider-core/src/test/app_packages/test_am_config/metainfo.json
new file mode 100644
index 0000000..0238fd3
--- /dev/null
+++ b/slider-core/src/test/app_packages/test_am_config/metainfo.json
@@ -0,0 +1,70 @@
+{
+ "schemaVersion": "2.1",
+ "application": {
+ "name": "DATE_LOGGER",
+ "components": [
+ {
+ "name": "DATE_LOGGER",
+ "commands": [
+ {
+ "exec": "echo \"Time: `date +{$conf:@//site/cl-site/datetime.format}` > {$conf:@//site/cl-site/logfile.location} 2>&1 && sleep 180000"
+ }
+ ]
+ },
+ {
+ "name": "DATE_LOGGER_CLIENT",
+ "category": "CLIENT",
+ "configFiles": [
+ {
+ "type": "json",
+ "fileName": "client.json",
+ "dictionaryName": "client-json"
+ }
+ ]
+ }
+ ],
+ "packages": [
+ {
+ "type": "archive",
+ "name": "test_am_config_generation.tgz"
+ }
+ ],
+ "configFiles": [
+ {
+ "type": "properties",
+ "fileName": "test.properties",
+ "dictionaryName": "test-properties"
+ },
+ {
+ "type": "template",
+ "fileName": "test.template",
+ "dictionaryName": "test-template"
+ },
+ {
+ "type": "json",
+ "fileName": "test.json",
+ "dictionaryName": "test-json"
+ },
+ {
+ "type": "env",
+ "fileName": "testenv",
+ "dictionaryName": "test-env"
+ },
+ {
+ "type": "hadoop-xml",
+ "fileName": "test-hadoop.xml",
+ "dictionaryName": "test-hadoop-xml"
+ },
+ {
+ "type": "xml",
+ "fileName": "test.xml",
+ "dictionaryName": "test-xml"
+ },
+ {
+ "type": "yaml",
+ "fileName": "test.yaml",
+ "dictionaryName": "test-yaml"
+ }
+ ]
+ }
+}
diff --git a/slider-core/src/test/app_packages/test_am_config/resources.json b/slider-core/src/test/app_packages/test_am_config/resources.json
new file mode 100644
index 0000000..72c2b7b
--- /dev/null
+++ b/slider-core/src/test/app_packages/test_am_config/resources.json
@@ -0,0 +1,17 @@
+{
+ "schema" : "http://example.org/specification/v2.0.0",
+ "metadata" : {
+ },
+ "global" : {
+ },
+ "components": {
+ "slider-appmaster": {
+ "yarn.memory": "256"
+ },
+ "DATE_LOGGER": {
+ "yarn.role.priority": "1",
+ "yarn.component.instances": "1",
+ "yarn.memory": "128"
+ }
+ }
+}
diff --git a/slider-core/src/test/app_packages/test_am_config/resources/test.template b/slider-core/src/test/app_packages/test_am_config/resources/test.template
new file mode 100644
index 0000000..1aff6bd
--- /dev/null
+++ b/slider-core/src/test/app_packages/test_am_config/resources/test.template
@@ -0,0 +1 @@
+test ${templatekey1} {{templatekey2}} content
diff --git a/slider-core/src/test/app_packages/test_am_config/test_archive/testfile b/slider-core/src/test/app_packages/test_am_config/test_archive/testfile
new file mode 100644
index 0000000..8cd6f83
--- /dev/null
+++ b/slider-core/src/test/app_packages/test_am_config/test_archive/testfile
@@ -0,0 +1 @@
+test archive contents
diff --git a/slider-core/src/test/app_packages/test_command_log/resources_unique_names.json b/slider-core/src/test/app_packages/test_command_log/resources_unique_names.json
new file mode 100644
index 0000000..46b0629
--- /dev/null
+++ b/slider-core/src/test/app_packages/test_command_log/resources_unique_names.json
@@ -0,0 +1,18 @@
+{
+ "schema": "http://example.org/specification/v2.0.0",
+ "metadata": {
+ },
+ "global": {
+ },
+ "components": {
+ "COMMAND_LOGGER": {
+ "component.unique.names": "true",
+ "yarn.memory": "128",
+ "yarn.role.priority": "1",
+ "yarn.component.instances": "2"
+ },
+ "slider-appmaster": {
+ "yarn.memory": "256"
+ }
+ }
+}
diff --git a/slider-core/src/test/java/org/apache/slider/core/registry/docstore/TestPublishedConfigurationOutputter.java b/slider-core/src/test/java/org/apache/slider/core/registry/docstore/TestPublishedConfigurationOutputter.java
new file mode 100644
index 0000000..63d5961
--- /dev/null
+++ b/slider-core/src/test/java/org/apache/slider/core/registry/docstore/TestPublishedConfigurationOutputter.java
@@ -0,0 +1,221 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.slider.core.registry.docstore;
+
+import com.google.common.base.Charsets;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.slider.common.tools.SliderFileSystem;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.powermock.api.easymock.PowerMock;
+import org.yaml.snakeyaml.Yaml;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.expect;
+import static org.mockito.Matchers.anyString;
+import static org.powermock.api.easymock.PowerMock.createNiceMock;
+
+public class TestPublishedConfigurationOutputter {
+ private static HashMap<String, String> config = new HashMap<>();
+
+ @Rule
+ public TemporaryFolder tmpDir = new TemporaryFolder();
+
+ @Before
+ public void setup() {
+ config.put("key1", "val1");
+ }
+
+ @Test
+ public void testJson() throws IOException {
+ PublishedConfigurationOutputter configurationOutputter =
+ PublishedConfigurationOutputter.createOutputter(ConfigFormat.JSON,
+ new PublishedConfiguration("description",
+ config.entrySet()));
+
+ String output = configurationOutputter.asString().replaceAll("( |\\r|\\n)",
+ "");
+ assert "{\"key1\":\"val1\"}".equals(output);
+
+ File file = tmpDir.newFile();
+ configurationOutputter.save(file);
+
+ ObjectMapper mapper = new ObjectMapper();
+ Map<String, String> read = mapper.readValue(file, Map.class);
+ assert 1 == read.size();
+ assert "val1".equals(read.get("key1"));
+ }
+
+ @Test
+ public void testXml() throws IOException {
+ PublishedConfigurationOutputter configurationOutputter =
+ PublishedConfigurationOutputter.createOutputter(ConfigFormat.XML,
+ new PublishedConfiguration("description",
+ config.entrySet()));
+
+ String output = configurationOutputter.asString().replaceAll("( |\\r|\\n)",
+ "");
+ assert output.contains(
+ "<configuration><property><name>key1</name><value>val1</value><source/></property></configuration>");
+
+ File file = tmpDir.newFile();
+ configurationOutputter.save(file);
+
+ assert FileUtils.readFileToString(file, Charsets.UTF_8)
+ .replaceAll("( |\\r|\\n)", "")
+ .contains(
+ "<configuration><property><name>key1</name><value>val1</value><source/></property></configuration>");
+ }
+
+ @Test
+ public void testHadoopXml() throws IOException {
+ PublishedConfigurationOutputter configurationOutputter =
+ PublishedConfigurationOutputter.createOutputter(ConfigFormat.HADOOP_XML,
+ new PublishedConfiguration("description",
+ config.entrySet()));
+
+ String output = configurationOutputter.asString().replaceAll("( |\\r|\\n)",
+ "");
+ assert output.contains("<configuration><property><name>key1</name><value>val1</value><source/></property></configuration>");
+
+ File file = tmpDir.newFile();
+ configurationOutputter.save(file);
+
+ assert FileUtils.readFileToString(file, Charsets.UTF_8)
+ .replaceAll("( |\\r|\\n)", "")
+ .contains( "<configuration><property><name>key1</name><value>val1</value><source/></property></configuration>");
+ }
+
+ @Test
+ public void testProperties() throws IOException {
+ PublishedConfigurationOutputter configurationOutputter =
+ PublishedConfigurationOutputter.createOutputter(ConfigFormat.PROPERTIES,
+ new PublishedConfiguration("description",
+ config.entrySet()));
+
+ String output = configurationOutputter.asString();
+ assert output.contains("key1=val1");
+
+ File file = tmpDir.newFile();
+ configurationOutputter.save(file);
+
+ Properties properties = new Properties();
+ FileInputStream fis = null;
+ try {
+ fis = new FileInputStream(file);
+ properties.load(fis);
+ } finally {
+ if (fis != null) {
+ fis.close();
+ }
+ }
+ assert 1 == properties.size();
+ assert "val1".equals(properties.getProperty("key1"));
+ }
+
+ @Test
+ public void testYaml() throws IOException {
+ PublishedConfigurationOutputter configurationOutputter =
+ PublishedConfigurationOutputter.createOutputter(ConfigFormat.YAML,
+ new PublishedConfiguration("description",
+ config.entrySet()));
+
+ String output = configurationOutputter.asString().replaceAll("(\\r|\\n)",
+ "");
+ assert "key1: val1".equals(output);
+
+ File file = tmpDir.newFile();
+ configurationOutputter.save(file);
+
+ Yaml yaml = new Yaml();
+ FileInputStream fis = null;
+ Map<String, String> read;
+ try {
+ fis = new FileInputStream(file);
+ read = (Map<String, String>) yaml.load(fis);
+ } finally {
+ if (fis != null) {
+ fis.close();
+ }
+ }
+ assert 1 == read.size();
+ assert "val1".equals(read.get("key1"));
+ }
+
+ @Test
+ public void testEnv() throws IOException {
+ HashMap<String, String> envConfig = new HashMap<>(config);
+ envConfig.put("content", "content {{key1}} ");
+
+ PublishedConfigurationOutputter configurationOutputter =
+ PublishedConfigurationOutputter.createOutputter(ConfigFormat.ENV,
+ new PublishedConfiguration("description",
+ envConfig.entrySet()));
+
+ String output = configurationOutputter.asString();
+ assert "content val1 ".equals(output);
+
+ File file = tmpDir.newFile();
+ configurationOutputter.save(file);
+
+ assert "content val1 ".equals(FileUtils.readFileToString(file,
+ Charsets.UTF_8));
+ }
+
+ @Test
+ public void testTemplate1() throws IOException {
+ HashMap<String, String> templateConfig = new HashMap<>(config);
+ templateConfig.put(ConfigUtils.TEMPLATE_FILE, "templateFileName");
+
+ SliderFileSystem fileSystem = createNiceMock(SliderFileSystem.class);
+ expect(fileSystem.buildResourcePath(anyString())).andReturn(new Path("path")).anyTimes();
+ expect(fileSystem.isFile(anyObject(Path.class))).andReturn(true).anyTimes();
+ expect(fileSystem.cat(anyObject(Path.class))).andReturn("content {{key1}}\n more ${key1} content").anyTimes();
+
+ PowerMock.replay(fileSystem);
+
+ ConfigUtils.prepConfigForTemplateOutputter(ConfigFormat.TEMPLATE,
+ templateConfig, fileSystem, "clusterName", null);
+ PublishedConfigurationOutputter configurationOutputter =
+ PublishedConfigurationOutputter.createOutputter(ConfigFormat.TEMPLATE,
+ new PublishedConfiguration("description",
+ templateConfig.entrySet()));
+
+ String output = configurationOutputter.asString();
+ assert "content val1\n more val1 content".equals(output);
+
+ File file = tmpDir.newFile();
+ configurationOutputter.save(file);
+
+ PowerMock.verify(fileSystem);
+
+ assert "content val1\n more val1 content".equals(
+ FileUtils.readFileToString(file, Charsets.UTF_8));
+ }
+}
diff --git a/slider-core/src/test/java/org/apache/slider/providers/agent/TestAgentClientProvider2.java b/slider-core/src/test/java/org/apache/slider/providers/agent/TestAgentClientProvider2.java
index b919fcf..b959e2f 100644
--- a/slider-core/src/test/java/org/apache/slider/providers/agent/TestAgentClientProvider2.java
+++ b/slider-core/src/test/java/org/apache/slider/providers/agent/TestAgentClientProvider2.java
@@ -239,7 +239,11 @@
public void testSliderClientForInstallFailures() throws Exception {
describe(" IGNORE ANY STACK TRACES BELOW ");
- SliderClient client = new SliderClient();
+ SliderClient client = PowerMock.createPartialMock(SliderClient.class,
+ "getRegistryOperations");
+ expect(client.getRegistryOperations()).andReturn(null).anyTimes();
+ PowerMock.replay(SliderClient.class);
+
client.bindArgs(new Configuration(), "client", "--dest", "a_random_path/none", "--package", "a_random_pkg.zip");
ActionClientArgs args = new ActionClientArgs();
@@ -287,5 +291,7 @@
assertExceptionDetails(e, SliderExitCodes.EXIT_BAD_CONFIGURATION,
AgentClientProvider.E_COULD_NOT_READ_METAINFO);
}
+
+ PowerMock.verify(SliderClient.class);
}
}
diff --git a/slider-core/src/test/java/org/apache/slider/providers/agent/TestAgentProviderService.java b/slider-core/src/test/java/org/apache/slider/providers/agent/TestAgentProviderService.java
index 9fbb3d0..af1c61f 100644
--- a/slider-core/src/test/java/org/apache/slider/providers/agent/TestAgentProviderService.java
+++ b/slider-core/src/test/java/org/apache/slider/providers/agent/TestAgentProviderService.java
@@ -88,11 +88,13 @@
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
+import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -285,6 +287,7 @@
ProviderRole role = new ProviderRole("HBASE_MASTER", 1);
SliderFileSystem sliderFileSystem = createNiceMock(SliderFileSystem.class);
ContainerLauncher launcher = createNiceMock(ContainerLauncher.class);
+ expect(launcher.getEnv()).andReturn(new MapOperations()).anyTimes();
Path generatedConfPath = new Path(".", "test");
MapOperations resourceComponent = new MapOperations();
MapOperations appComponent = new MapOperations();
@@ -347,6 +350,9 @@
anyString(),
anyString()
);
+
+ doReturn(Collections.emptyMap()).when(mockAps).getRoleClusterNodeMapping();
+
expect(access.isApplicationLive()).andReturn(true).anyTimes();
ClusterDescription desc = new ClusterDescription();
desc.setOption(OptionKeys.ZOOKEEPER_QUORUM, "host1:2181");
@@ -357,16 +363,15 @@
expect(access.getRoleClusterNodeMapping()).andReturn(cnMap).anyTimes();
- AggregateConf aggConf = new AggregateConf();
- ConfTreeOperations treeOps = aggConf.getAppConfOperations();
+ ConfTreeOperations treeOps = instanceDefinition.getAppConfOperations();
treeOps.getOrAddComponent("HBASE_MASTER").put(AgentKeys.WAIT_HEARTBEAT, "0");
treeOps.set(OptionKeys.APPLICATION_NAME, "HBASE");
treeOps.set("site.fs.defaultFS", "hdfs://HOST1:8020/");
treeOps.set("internal.data.dir.path", "hdfs://HOST1:8020/database");
- expect(access.getInstanceDefinitionSnapshot()).andReturn(aggConf);
+ expect(access.getInstanceDefinitionSnapshot()).andReturn(instanceDefinition);
expect(access.getInternalsSnapshot()).andReturn(treeOps).anyTimes();
expect(access.getAppConfSnapshot()).andReturn(treeOps).anyTimes();
- replay(access, ctx, container, sliderFileSystem, mockFs);
+ replay(access, ctx, container, sliderFileSystem, mockFs, launcher);
try {
mockAps.buildContainerLaunchContext(launcher,
@@ -1225,6 +1230,8 @@
SliderFileSystem sliderFileSystem = createNiceMock(SliderFileSystem.class);
ContainerLauncher launcher = createNiceMock(ContainerLauncher.class);
ContainerLauncher launcher2 = createNiceMock(ContainerLauncher.class);
+ expect(launcher.getEnv()).andReturn(new MapOperations()).anyTimes();
+ expect(launcher2.getEnv()).andReturn(new MapOperations()).anyTimes();
Path generatedConfPath = new Path(".", "test");
MapOperations resourceComponent = new MapOperations();
MapOperations appComponent = new MapOperations();
@@ -1291,18 +1298,28 @@
desc.setInfo(OptionKeys.APPLICATION_NAME, "HBASE");
expect(access.getClusterStatus()).andReturn(desc).anyTimes();
- AggregateConf aggConf = new AggregateConf();
- ConfTreeOperations treeOps = aggConf.getAppConfOperations();
+ ConfTreeOperations treeOps = instanceDefinition.getAppConfOperations();
treeOps.getOrAddComponent("HBASE_MASTER").put(AgentKeys.WAIT_HEARTBEAT, "0");
- treeOps.getOrAddComponent("HBASE_REGIONSERVER").put(AgentKeys.WAIT_HEARTBEAT, "0");
+ treeOps.getOrAddComponent("HBASE_REGIONSERVER").put(
+ AgentKeys.WAIT_HEARTBEAT, "0");
treeOps.set(OptionKeys.APPLICATION_NAME, "HBASE");
treeOps.set("site.fs.defaultFS", "hdfs://HOST1:8020/");
treeOps.set("internal.data.dir.path", "hdfs://HOST1:8020/database");
- expect(access.getInstanceDefinitionSnapshot()).andReturn(aggConf).anyTimes();
+ expect(access.getInstanceDefinitionSnapshot()).andReturn(instanceDefinition).anyTimes();
expect(access.getInternalsSnapshot()).andReturn(treeOps).anyTimes();
expect(access.getAppConfSnapshot()).andReturn(treeOps).anyTimes();
- doNothing().when(mockAps).publishApplicationInstanceData(anyString(), anyString(), anyCollection());
- replay(access, ctx, container, sliderFileSystem, mockFs);
+ doNothing().when(mockAps).publishApplicationInstanceData(anyString(),
+ anyString(), anyCollection());
+ doNothing().when(mockAps).localizeConfigFiles(
+ (ContainerLauncher)Matchers.anyObject(),
+ anyString(),
+ anyString(),
+ (Metainfo)Matchers.anyObject(),
+ anyMap(),
+ (MapOperations)Matchers.anyObject(),
+ (SliderFileSystem)Matchers.anyObject());
+ doReturn(Collections.emptyMap()).when(mockAps).getRoleClusterNodeMapping();
+ replay(access, ctx, container, sliderFileSystem, mockFs, launcher, launcher2);
// build two containers
try {
@@ -1849,12 +1866,12 @@
AgentProviderService aps = createAgentProviderService(new Configuration());
Map<String, Map<String, String>> allConfigs = new HashMap<String, Map<String, String>>();
Map<String, String> cfg1 = new HashMap<String, String>();
- cfg1.put("a1", "${@//site/cfg-2/A1}");
+ cfg1.put("a1", "0${@//site/cfg-2/A1}");
cfg1.put("b1", "22");
cfg1.put("c1", "33");
cfg1.put("d1", "${@//site/cfg1/c1}AA");
Map<String, String> cfg2 = new HashMap<String, String>();
- cfg2.put("A1", "11");
+ cfg2.put("A1", "11${@//site/cfg1/b1}");
cfg2.put("B1", "${@//site/cfg-2/A1},${@//site/cfg-2/A1},AA,${@//site/cfg1/c1}");
cfg2.put("C1", "DD${@//site/cfg1/c1}");
cfg2.put("D1", "${14}");
@@ -1862,15 +1879,30 @@
allConfigs.put("cfg1", cfg1);
allConfigs.put("cfg-2", cfg2);
aps.dereferenceAllConfigs(allConfigs);
- Assert.assertEquals("11", cfg1.get("a1"));
+ Assert.assertEquals("01122", cfg1.get("a1"));
Assert.assertEquals("22", cfg1.get("b1"));
Assert.assertEquals("33", cfg1.get("c1"));
Assert.assertEquals("33AA", cfg1.get("d1"));
- Assert.assertEquals("11", cfg2.get("A1"));
- Assert.assertEquals("11,11,AA,33", cfg2.get("B1"));
+ Assert.assertEquals("1122", cfg2.get("A1"));
+ Assert.assertEquals("1122,1122,AA,33", cfg2.get("B1"));
Assert.assertEquals("DD33", cfg2.get("C1"));
Assert.assertEquals("${14}", cfg2.get("D1"));
}
+ @Test
+ public void testDereferenceAllConfigLoop() throws IOException {
+ AgentProviderService aps = createAgentProviderService(new Configuration());
+ Map<String, Map<String, String>> allConfigs = new HashMap<String, Map<String, String>>();
+ Map<String, String> cfg1 = new HashMap<String, String>();
+ cfg1.put("a1", "0${@//site/cfg-2/A1}");
+ Map<String, String> cfg2 = new HashMap<String, String>();
+ cfg2.put("A1", "11${@//site/cfg1/a1}");
+
+ allConfigs.put("cfg1", cfg1);
+ allConfigs.put("cfg-2", cfg2);
+ aps.dereferenceAllConfigs(allConfigs);
+ Assert.assertEquals("0${@//site/cfg-2/A1}", cfg1.get("a1"));
+ Assert.assertEquals("11${@//site/cfg1/a1}", cfg2.get("A1"));
+ }
}
diff --git a/slider-core/src/test/java/org/apache/slider/providers/agent/application/metadata/TestMetainfoParser.java b/slider-core/src/test/java/org/apache/slider/providers/agent/application/metadata/TestMetainfoParser.java
index 30283d1..ba1912a 100644
--- a/slider-core/src/test/java/org/apache/slider/providers/agent/application/metadata/TestMetainfoParser.java
+++ b/slider-core/src/test/java/org/apache/slider/providers/agent/application/metadata/TestMetainfoParser.java
@@ -50,7 +50,7 @@
Assert.assertNotNull(metainfo.getApplication());
Application application = metainfo.getApplication();
assert "STORM".equals(application.getName());
- assert 5 == application.getComponents().size();
+ assert 6 == application.getComponents().size();
OSPackage pkg = application.getOSSpecifics().get(0).getPackages().get(0);
assert "tarball".equals(pkg.getType());
assert "files/apache-storm-0.9.1.2.1.1.0-237.tar.gz".equals(pkg.getName());
@@ -63,9 +63,20 @@
if (comp != null && comp.getName().equals("SUPERVISOR")) {
Assert.assertEquals(1, comp.getComponentExports().size());
}
+ if (comp != null && comp.getName().equals("ANOTHER_COMPONENT")) {
+ assert 2 == comp.getCommands().size();
+ assert "start command".equals(comp.getCommands().get(0).getExec());
+ assert "START".equals(comp.getCommands().get(0).getName());
+ assert "stop command".equals(comp.getCommands().get(1).getExec());
+ assert "STOP".equals(comp.getCommands().get(1).getName());
+ }
}
assert found;
Assert.assertEquals(0, application.getConfigFiles().size());
+ assert 1 == application.getPackages().size();
+ Package p = application.getPackages().get(0);
+ assert "tarball".equals(p.getType());
+ assert "test-tarball-name.tgz".equals(p.getName());
}
@Test
diff --git a/slider-core/src/test/resources/org/apache/slider/providers/agent/application/metadata/metainfo.xml b/slider-core/src/test/resources/org/apache/slider/providers/agent/application/metadata/metainfo.xml
index d9004ad..fbe9299 100644
--- a/slider-core/src/test/resources/org/apache/slider/providers/agent/application/metadata/metainfo.xml
+++ b/slider-core/src/test/resources/org/apache/slider/providers/agent/application/metadata/metainfo.xml
@@ -142,6 +142,20 @@
<timeout>600</timeout>
</commandScript>
</component>
+
+ <component>
+ <name>ANOTHER_COMPONENT</name>
+ <category>MASTER</category>
+ <commands>
+ <command>
+ <exec>start command</exec>
+ </command>
+ <command>
+ <exec>stop command</exec>
+ <name>STOP</name>
+ </command>
+ </commands>
+ </component>
</components>
<osSpecifics>
@@ -155,5 +169,12 @@
</packages>
</osSpecific>
</osSpecifics>
+
+ <packages>
+ <package>
+ <type>tarball</type>
+ <name>test-tarball-name.tgz</name>
+ </package>
+ </packages>
</application>
</metainfo>
diff --git a/slider-funtest/src/test/groovy/org/apache/slider/funtest/ResourcePaths.groovy b/slider-funtest/src/test/groovy/org/apache/slider/funtest/ResourcePaths.groovy
index 5de2b8e..c0aa06a 100644
--- a/slider-funtest/src/test/groovy/org/apache/slider/funtest/ResourcePaths.groovy
+++ b/slider-funtest/src/test/groovy/org/apache/slider/funtest/ResourcePaths.groovy
@@ -38,4 +38,9 @@
String SLEEP_META = "$SLIDER_CORE_APP_PACKAGES/test_min_pkg/sleep_cmd/metainfo.json"
String SLEEP_APPCONFIG = "$SLIDER_CORE_APP_PACKAGES/test_min_pkg/sleep_cmd/appConfig.json"
+ String AM_CONFIG_RESOURCES = "$SLIDER_CORE_APP_PACKAGES/test_am_config/resources.json"
+ String AM_CONFIG_META = "$SLIDER_CORE_APP_PACKAGES/test_am_config/metainfo.json"
+ String AM_CONFIG_APPCONFIG = "$SLIDER_CORE_APP_PACKAGES/test_am_config/appConfig.json"
+
+ String UNIQUE_COMPONENT_RESOURCES = "$SLIDER_CORE_APP_PACKAGES/test_command_log/resources_unique_names.json"
}
\ No newline at end of file
diff --git a/slider-funtest/src/test/groovy/org/apache/slider/funtest/misc/AMConfigPublishingIT.groovy b/slider-funtest/src/test/groovy/org/apache/slider/funtest/misc/AMConfigPublishingIT.groovy
new file mode 100644
index 0000000..e84ada6
--- /dev/null
+++ b/slider-funtest/src/test/groovy/org/apache/slider/funtest/misc/AMConfigPublishingIT.groovy
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.slider.funtest.misc
+
+import groovy.transform.CompileStatic
+import groovy.util.logging.Slf4j
+import org.apache.slider.common.SliderExitCodes
+import org.apache.slider.common.params.Arguments
+import org.apache.slider.common.params.SliderActions
+import org.apache.slider.common.tools.SliderUtils
+import org.apache.slider.funtest.ResourcePaths
+import org.apache.slider.funtest.framework.AgentCommandTestBase
+import org.apache.slider.funtest.framework.FuntestProperties
+import org.apache.slider.funtest.framework.SliderShell
+import org.junit.After
+import org.junit.Before
+import org.junit.Test
+
+@CompileStatic
+@Slf4j
+public class AMConfigPublishingIT extends AgentCommandTestBase
+implements FuntestProperties, Arguments, SliderExitCodes, SliderActions {
+
+ private static String DATE_LOGGER = "DATE_LOGGER"
+ private static String APP_NAME = "am-config-publishing"
+ private static String APP_METAINFO = ResourcePaths.AM_CONFIG_META
+ private static String APP_RESOURCE = ResourcePaths.AM_CONFIG_RESOURCES
+ private static String APP_TEMPLATE = ResourcePaths.AM_CONFIG_APPCONFIG
+ private static String CLIENT_CONFIG = "../slider-core/src/test/app_packages/test_am_config/clientInstallConfig-default.json"
+ private static String RESOURCE_DIR = "../slider-core/src/test/app_packages/test_am_config/resources"
+ private static String TGZ_SOURCE = "../slider-core/src/test/app_packages/test_am_config/test_archive"
+ private static String TGZ_FILE = "test_am_config_generation.tgz"
+ private static String TGZ_DIR = "target/package-tmp/"
+
+ private static String CLIENT_INSTALL_DIR = "target/am-config-client"
+
+ private HashMap<String, String> files =
+ ["client.json": """{ "clientkey" : "clientval"}""",
+ "test.json": """{ "jsonkey" : "val1"}""",
+ "test.xml": "<configuration><property><name>xmlkey</name><value>val2</value><source/></property></configuration>",
+ "test-hadoop.xml": "<configuration><property><name>xmlkey</name><value>val3</value><source/></property></configuration>",
+ "test.properties": "propkey=val4",
+ "test.yaml": "yamlkey: val5",
+ "test.template": "test templateval1 templateval2 content",
+ "testenv": "test envval1 envval2 content",
+ "testfile": "test archive contents"
+ ]
+
+ @Before
+ public void prepareCluster() {
+ setupCluster(APP_NAME)
+ }
+
+ @Before
+ public void setupApplicationPackage() {
+ File tgzFile = new File(TGZ_DIR + TGZ_FILE);
+ SliderUtils.tarGzipFolder(new File(TGZ_SOURCE), tgzFile, null);
+ try {
+ tgzFile = tgzFile.canonicalFile
+ SliderShell shell = slider(EXIT_SUCCESS,
+ [
+ ACTION_RESOURCE,
+ ARG_INSTALL, ARG_RESOURCE, tgzFile.absolutePath,
+ ARG_OVERWRITE
+ ])
+ logShell(shell)
+ shell = slider(EXIT_SUCCESS,
+ [
+ ACTION_RESOURCE,
+ ARG_INSTALL, ARG_RESOURCE, RESOURCE_DIR,
+ ARG_DESTDIR, APP_NAME,
+ ARG_OVERWRITE
+ ])
+ logShell(shell)
+ log.info "Resources uploaded at home directory .slider/resources"
+ } catch (Exception e) {
+ setup_failed = true
+ throw e;
+ }
+ File dir = new File(CLIENT_INSTALL_DIR)
+ if (!dir.exists()) {
+ dir.mkdir()
+ }
+ }
+
+
+ @After
+ public void destroyCluster() {
+ cleanup(APP_NAME)
+ }
+
+ @Test
+ public void testCreate() throws Throwable {
+ assumeAgentTestsEnabled()
+
+ describe APP_NAME
+
+ def path = buildClusterPath(APP_NAME)
+ assert !clusterFS.exists(path)
+
+ slider(EXIT_SUCCESS,
+ [
+ ACTION_CREATE, APP_NAME,
+ ARG_TEMPLATE, APP_TEMPLATE, ARG_RESOURCES, APP_RESOURCE,
+ ARG_METAINFO, APP_METAINFO
+ ])
+ ensureApplicationIsUp(APP_NAME)
+
+ expectLiveContainerCountReached(APP_NAME, DATE_LOGGER, 1,
+ CONTAINER_LAUNCH_TIMEOUT)
+ status(0, APP_NAME)
+
+ SliderShell shell = slider(EXIT_SUCCESS,
+ [
+ ACTION_CLIENT, ARG_INSTALL,
+ ARG_DEST, CLIENT_INSTALL_DIR,
+ ARG_NAME, APP_NAME
+ ])
+ logShell(shell)
+
+ for (Map.Entry<String, String> entry : files.entrySet()) {
+ String name = entry.getKey();
+ File file = new File(CLIENT_INSTALL_DIR + "/" + name)
+ assert file.exists()
+ String contents = file.text.replaceAll("(\\r|\\n)", "")
+ assert contents.contains(entry.getValue()), "$name didn't contain value"
+ }
+ }
+
+}
diff --git a/slider-funtest/src/test/groovy/org/apache/slider/funtest/misc/UniqueComponentNamesIT.groovy b/slider-funtest/src/test/groovy/org/apache/slider/funtest/misc/UniqueComponentNamesIT.groovy
new file mode 100644
index 0000000..110de22
--- /dev/null
+++ b/slider-funtest/src/test/groovy/org/apache/slider/funtest/misc/UniqueComponentNamesIT.groovy
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.slider.funtest.misc
+
+import groovy.transform.CompileStatic
+import groovy.util.logging.Slf4j
+import org.apache.slider.api.ClusterDescription
+import org.apache.slider.common.SliderExitCodes
+import org.apache.slider.common.SliderKeys
+import org.apache.slider.common.params.Arguments
+import org.apache.slider.common.params.SliderActions
+import org.apache.slider.funtest.ResourcePaths
+import org.apache.slider.funtest.framework.AgentCommandTestBase
+import org.apache.slider.funtest.framework.FuntestProperties
+import org.apache.slider.funtest.framework.SliderShell
+import org.junit.After
+import org.junit.Before
+import org.junit.Test
+
+@CompileStatic
+@Slf4j
+public class UniqueComponentNamesIT extends AgentCommandTestBase
+implements FuntestProperties, Arguments, SliderExitCodes, SliderActions {
+
+ private static String COMMAND_LOGGER = "COMMAND_LOGGER"
+ private static String COMMAND_LOGGER1 = COMMAND_LOGGER + "1"
+ private static String COMMAND_LOGGER2 = COMMAND_LOGGER + "2"
+ private static String COMMAND_LOGGER3 = COMMAND_LOGGER + "3"
+ private static String APPLICATION_NAME = "unique-component-names"
+ private static String APP_RESOURCE = ResourcePaths.UNIQUE_COMPONENT_RESOURCES
+
+ @Before
+ public void prepareCluster() {
+ setupCluster(APPLICATION_NAME)
+ }
+
+ @After
+ public void destroyCluster() {
+ cleanup(APPLICATION_NAME)
+ }
+
+ @Test
+ public void testCreateFlex() throws Throwable {
+ assumeAgentTestsEnabled()
+
+ describe APPLICATION_NAME
+
+ def path = buildClusterPath(APPLICATION_NAME)
+ assert !clusterFS.exists(path)
+
+ File launchReportFile = createTempJsonFile();
+ SliderShell shell = createTemplatedSliderApplication(APPLICATION_NAME,
+ APP_TEMPLATE,
+ APP_RESOURCE,
+ [],
+ launchReportFile)
+ logShell(shell)
+
+ ensureYarnApplicationIsUp(launchReportFile)
+ ensureApplicationIsUp(APPLICATION_NAME)
+
+ ClusterDescription cd = execStatus(APPLICATION_NAME)
+
+ assert 3 == cd.statistics.size()
+ assert cd.statistics.keySet().containsAll([SliderKeys.COMPONENT_AM, COMMAND_LOGGER1, COMMAND_LOGGER2])
+
+ slider(EXIT_SUCCESS,
+ [
+ ACTION_FLEX,
+ APPLICATION_NAME,
+ ARG_COMPONENT,
+ COMMAND_LOGGER,
+ "3"
+ ])
+
+ sleep(1000 * 10)
+
+ status(0, APPLICATION_NAME)
+ expectLiveContainerCountReached(APPLICATION_NAME, COMMAND_LOGGER3, 1,
+ CONTAINER_LAUNCH_TIMEOUT)
+ expectLiveContainerCountReached(APPLICATION_NAME, COMMAND_LOGGER2, 1,
+ CONTAINER_LAUNCH_TIMEOUT)
+ expectLiveContainerCountReached(APPLICATION_NAME, COMMAND_LOGGER1, 1,
+ CONTAINER_LAUNCH_TIMEOUT)
+
+ slider(EXIT_SUCCESS,
+ [
+ ACTION_FLEX,
+ APPLICATION_NAME,
+ ARG_COMPONENT,
+ COMMAND_LOGGER,
+ "1"
+ ])
+
+ sleep(1000 * 10)
+
+ status(0, APPLICATION_NAME)
+ expectLiveContainerCountReached(APPLICATION_NAME, COMMAND_LOGGER3, 0,
+ CONTAINER_LAUNCH_TIMEOUT)
+ expectLiveContainerCountReached(APPLICATION_NAME, COMMAND_LOGGER2, 0,
+ CONTAINER_LAUNCH_TIMEOUT)
+ expectLiveContainerCountReached(APPLICATION_NAME, COMMAND_LOGGER1, 1,
+ CONTAINER_LAUNCH_TIMEOUT)
+ }
+
+}