{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2?fields=tasks/*",
"Requests" : {
"id" : 2,
"cluster_name" : "mycluster"
},
"tasks" : [
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/33",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Gmond/Hdp::Exec[hdp-gmond service]/Exec[hdp-gmond service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 5.72 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Gmond/Hdp::Exec[hdp-gmond service]/Exec[hdp-gmond service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 5.72 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host2",
"id" : 33,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "GANGLIA_MONITOR",
"start_time" : 1352864090181,
"stage_id" : 1
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/30",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}a973d9bcff056aeff7f22221886c84b7' to '{md5}df2d55356b238461af57fe22ad993e4d'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/hdfs-site.xml]/content: content changed '{md5}54edf0ba61f6501cc49c0d7788b266b1' to '{md5}b25bda7a405235227d20732f0972c5f6'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Datanode/Hdp-hadoop::Service[datanode]/Hdp::Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.00 seconds\u001B[0m\n\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. 
Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}a973d9bcff056aeff7f22221886c84b7' to '{md5}df2d55356b238461af57fe22ad993e4d'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/hdfs-site.xml]/content: content changed '{md5}54edf0ba61f6501cc49c0d7788b266b1' to '{md5}b25bda7a405235227d20732f0972c5f6'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Datanode/Hdp-hadoop::Service[datanode]/Hdp::Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.00 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host4",
"id" : 30,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "DATANODE",
"start_time" : 1352864090068,
"stage_id" : 1
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/38",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Hdfs::Service_check/Hdp-hadoop::Exec-hadoop[hdfs::service_check::check_safemode]/Hdp::Exec[hadoop --config /etc/hadoop/conf dfsadmin -safemode get | grep OFF]/Exec[hadoop --config /etc/hadoop/conf dfsadmin -safemode get | grep OFF]/returns: Safe mode is OFF\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Hdfs::Service_check/Hdp-hadoop::Exec-hadoop[hdfs::service_check::check_safemode]/Hdp::Exec[hadoop --config /etc/hadoop/conf dfsadmin -safemode get | grep OFF]/Exec[hadoop --config /etc/hadoop/conf dfsadmin -safemode get | grep OFF]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Hdfs::Service_check/Hdp-hadoop::Exec-hadoop[hdfs::service_check::create_file]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -rm /tmp/id280a7781_date381312; hadoop fs -put /etc/passwd /tmp/id280a7781_date381312]/Exec[hadoop --config /etc/hadoop/conf fs -rm /tmp/id280a7781_date381312; hadoop fs -put /etc/passwd /tmp/id280a7781_date381312]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Hdfs::Service_check/Hdp-hadoop::Exec-hadoop[hdfs::service_check::test]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -test -e /tmp/id280a7781_date381312]/Exec[hadoop --config /etc/hadoop/conf fs -test -e /tmp/id280a7781_date381312]: Triggered 'refresh' from 1 events\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 10.35 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Hdfs::Service_check/Hdp-hadoop::Exec-hadoop[hdfs::service_check::check_safemode]/Hdp::Exec[hadoop --config /etc/hadoop/conf dfsadmin -safemode get | grep OFF]/Exec[hadoop --config /etc/hadoop/conf dfsadmin -safemode get | grep OFF]/returns: Safe mode is OFF\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Hdfs::Service_check/Hdp-hadoop::Exec-hadoop[hdfs::service_check::check_safemode]/Hdp::Exec[hadoop --config /etc/hadoop/conf dfsadmin -safemode get | grep OFF]/Exec[hadoop --config /etc/hadoop/conf dfsadmin -safemode get | grep OFF]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Hdfs::Service_check/Hdp-hadoop::Exec-hadoop[hdfs::service_check::create_file]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -rm /tmp/id280a7781_date381312; hadoop fs -put /etc/passwd /tmp/id280a7781_date381312]/Exec[hadoop --config /etc/hadoop/conf fs -rm /tmp/id280a7781_date381312; hadoop fs -put /etc/passwd /tmp/id280a7781_date381312]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Hdfs::Service_check/Hdp-hadoop::Exec-hadoop[hdfs::service_check::test]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -test -e /tmp/id280a7781_date381312]/Exec[hadoop --config /etc/hadoop/conf fs -test -e /tmp/id280a7781_date381312]: Triggered 'refresh' from 1 events\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 10.35 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host2",
"id" : 38,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "EXECUTE",
"role" : "HDFS_SERVICE_CHECK",
"start_time" : 1352864269616,
"stage_id" : 2
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/26",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Gmond/Hdp::Exec[hdp-gmond service]/Exec[hdp-gmond service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 7.68 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Gmond/Hdp::Exec[hdp-gmond service]/Exec[hdp-gmond service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 7.68 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host5",
"id" : 26,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "GANGLIA_MONITOR",
"start_time" : 1352864089836,
"stage_id" : 1
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/24",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}a39a2689e76538c6d9090b00ceb04eb0' to '{md5}9786ed97b221e37075bdb64400bc804a'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/hdfs-site.xml]/content: content changed '{md5}c14eb8ab2bb5ab75789c875534ab64f4' to '{md5}9684de67c2a8fa0f7292418d6c0c1651'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Datanode/Hdp-hadoop::Service[datanode]/Hdp::Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.78 seconds\u001B[0m\n\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. 
Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}a39a2689e76538c6d9090b00ceb04eb0' to '{md5}9786ed97b221e37075bdb64400bc804a'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/hdfs-site.xml]/content: content changed '{md5}c14eb8ab2bb5ab75789c875534ab64f4' to '{md5}9684de67c2a8fa0f7292418d6c0c1651'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Datanode/Hdp-hadoop::Service[datanode]/Hdp::Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.78 seconds\u001B[0m",
"status" : "FAILED",
"stderr" : "none",
"host_name" : "host1",
"id" : 24,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "DATANODE",
"start_time" : 1352864089661,
"stage_id" : 1
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/35",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}4673b67e078cc9d84ffc4873e5198edf' to '{md5}654e54e7c3f58aa3d37d07110ad63bb5'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/hdfs-site.xml]/content: content changed '{md5}d3b2d5e47669c948fccb907fa32c2b55' to '{md5}0e079fd5bc7cc43a35b60012c9ee00d9'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Snamenode/Hdp-hadoop::Service[secondarynamenode]/Hdp::Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode']/Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.45 seconds\u001B[0m\n\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. 
Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}4673b67e078cc9d84ffc4873e5198edf' to '{md5}654e54e7c3f58aa3d37d07110ad63bb5'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/hdfs-site.xml]/content: content changed '{md5}d3b2d5e47669c948fccb907fa32c2b55' to '{md5}0e079fd5bc7cc43a35b60012c9ee00d9'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Snamenode/Hdp-hadoop::Service[secondarynamenode]/Hdp::Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode']/Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.45 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host3",
"id" : 35,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "SECONDARY_NAMENODE",
"start_time" : 1352864269474,
"stage_id" : 2
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/40",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;33mwarning: Dynamic lookup of $lzo_enabled at /var/lib/ambari-agent/puppet/modules/hdp-oozie/manifests/service.pp:37 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ensure at /var/lib/ambari-agent/puppet/modules/hdp-oozie/manifests/service.pp:76 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/oozie/conf/oozie-site.xml]/content: content changed '{md5}827a6e7bd4233d4dc82b20761aed1e30' to '{md5}4e59b973cec0811615008a580244bcdb'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Service/Hdp::Exec[/bin/sh -c 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz && mkdir -p /var/tmp/oozie && chown oozie:hadoop /var/tmp/oozie && cd /var/tmp/oozie' && su - oozie -c '/usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 /usr/lib/hadoop/ -extjs /usr/share/HDP-oozie/ext.zip -jars /usr/lib/hadoop/lib/hadoop-lzo-0.5.0.jar && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run ; hadoop dfs -put /usr/lib/oozie/share share ; hadoop dfs -chmod -R 755 /user/oozie/share && /usr/lib/oozie/bin/oozie-start.sh' ]/Exec[/bin/sh -c 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz && mkdir -p /var/tmp/oozie && chown oozie:hadoop /var/tmp/oozie && cd /var/tmp/oozie' && su - oozie -c '/usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 /usr/lib/hadoop/ -extjs /usr/share/HDP-oozie/ext.zip -jars /usr/lib/hadoop/lib/hadoop-lzo-0.5.0.jar && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run ; hadoop dfs -put /usr/lib/oozie/share share ; hadoop dfs -chmod -R 755 /user/oozie/share && /usr/lib/oozie/bin/oozie-start.sh' ]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 36.67 seconds\u001B[0m\n\n\u001B[0;33mwarning: Dynamic lookup of $lzo_enabled at /var/lib/ambari-agent/puppet/modules/hdp-oozie/manifests/service.pp:37 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ensure at /var/lib/ambari-agent/puppet/modules/hdp-oozie/manifests/service.pp:76 is deprecated. Support will be removed in Puppet 2.8. 
Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/oozie/conf/oozie-site.xml]/content: content changed '{md5}827a6e7bd4233d4dc82b20761aed1e30' to '{md5}4e59b973cec0811615008a580244bcdb'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Service/Hdp::Exec[/bin/sh -c 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz && mkdir -p /var/tmp/oozie && chown oozie:hadoop /var/tmp/oozie && cd /var/tmp/oozie' && su - oozie -c '/usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 /usr/lib/hadoop/ -extjs /usr/share/HDP-oozie/ext.zip -jars /usr/lib/hadoop/lib/hadoop-lzo-0.5.0.jar && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run ; hadoop dfs -put /usr/lib/oozie/share share ; hadoop dfs -chmod -R 755 /user/oozie/share && /usr/lib/oozie/bin/oozie-start.sh' ]/Exec[/bin/sh -c 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz && mkdir -p /var/tmp/oozie && chown oozie:hadoop /var/tmp/oozie && cd /var/tmp/oozie' && su - oozie -c '/usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 /usr/lib/hadoop/ -extjs /usr/share/HDP-oozie/ext.zip -jars /usr/lib/hadoop/lib/hadoop-lzo-0.5.0.jar && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run ; hadoop dfs -put /usr/lib/oozie/share share ; hadoop dfs -chmod -R 755 /user/oozie/share && /usr/lib/oozie/bin/oozie-start.sh' ]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 36.67 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host3",
"id" : 40,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "OOZIE_SERVER",
"start_time" : 1352864331712,
"stage_id" : 3
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/31",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Gmond/Hdp::Exec[hdp-gmond service]/Exec[hdp-gmond service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.15 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Gmond/Hdp::Exec[hdp-gmond service]/Exec[hdp-gmond service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.15 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host4",
"id" : 31,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "GANGLIA_MONITOR",
"start_time" : 1352864090105,
"stage_id" : 1
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/27",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server/Hdp-ganglia::Config::Generate_server[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker -m]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker -m]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server/Hdp-ganglia::Config::Generate_server[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode -m]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode -m]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server/Hdp-ganglia::Config::Generate_server[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves -m]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves -m]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server/Hdp-ganglia::Config::Generate_server[gmetad]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -t]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -t]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server/Hdp-ganglia::Config::Generate_server[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster -m]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster -m]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server::Gmetad/Hdp::Exec[hdp-gmetad service]/Exec[hdp-gmetad service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 8.14 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server/Hdp-ganglia::Config::Generate_server[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker -m]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker -m]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server/Hdp-ganglia::Config::Generate_server[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode -m]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode -m]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server/Hdp-ganglia::Config::Generate_server[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves -m]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves -m]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server/Hdp-ganglia::Config::Generate_server[gmetad]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -t]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -t]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server/Hdp-ganglia::Config::Generate_server[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster -m]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster -m]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Server::Gmetad/Hdp::Exec[hdp-gmetad service]/Exec[hdp-gmetad service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 8.14 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host5",
"id" : 27,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "GANGLIA_SERVER",
"start_time" : 1352864089883,
"stage_id" : 1
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/43",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-sqoop::Sqoop::Service_check/Exec[sqoop_smoke]/returns: Sqoop 1.4.2.1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-sqoop::Sqoop::Service_check/Exec[sqoop_smoke]/returns: git commit id ea3b95785b3daf62c68f1eb0e645636acc00d0c2\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-sqoop::Sqoop::Service_check/Exec[sqoop_smoke]/returns: Compiled by jenkins on Sat Nov 10 19:14:01 PST 2012\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-sqoop::Sqoop::Service_check/Exec[sqoop_smoke]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 3.15 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-sqoop::Sqoop::Service_check/Exec[sqoop_smoke]/returns: Sqoop 1.4.2.1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-sqoop::Sqoop::Service_check/Exec[sqoop_smoke]/returns: git commit id ea3b95785b3daf62c68f1eb0e645636acc00d0c2\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-sqoop::Sqoop::Service_check/Exec[sqoop_smoke]/returns: Compiled by jenkins on Sat Nov 10 19:14:01 PST 2012\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-sqoop::Sqoop::Service_check/Exec[sqoop_smoke]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 3.15 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host2",
"id" : 43,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "EXECUTE",
"role" : "SQOOP_SERVICE_CHECK",
"start_time" : 1352864331830,
"stage_id" : 3
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/42",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Hdp-hadoop::Exec-hadoop[pig::service_check::create_file]/Hdp::Exec[hadoop --config /etc/hadoop/conf dfs -rmr pigsmoke.out passwd; hadoop dfs -put /etc/passwd passwd ]/Exec[hadoop --config /etc/hadoop/conf dfs -rmr pigsmoke.out passwd; hadoop dfs -put /etc/passwd passwd ]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/File[/tmp/pigSmoke.sh]/ensure: defined content as '{md5}feac231e484c08e3bc5f83d0ee189a8c'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:39:58,367 [main] INFO org.apache.pig.Main - Apache Pig version 0.10.0.1 (rexported) compiled Nov 10 2012, 19:10:20\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:39:58,368 [main] INFO org.apache.pig.Main - Logging error messages to: /home/ambari_qa/pig_1352864398364.log\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:39:58,789 [main] INFO org.apache.pig.backend.hadoop.executionengine.HExecutionEngine - Connecting to hadoop file system at: hdfs://host5:8020\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:39:59,058 [main] INFO org.apache.pig.backend.hadoop.executionengine.HExecutionEngine - Connecting to map-reduce job tracker at: host3:50300\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:39:59,907 [main] INFO org.apache.pig.tools.pigstats.ScriptState - Pig features used in the script: UNKNOWN\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:00,158 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRCompiler - File concatenation threshold: 100 optimistic? 
false\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:00,183 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MultiQueryOptimizer - MR plan size before optimization: 1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:00,183 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MultiQueryOptimizer - MR plan size after optimization: 1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:00,288 [main] INFO org.apache.pig.tools.pigstats.ScriptState - Pig script settings are added to the job\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:00,312 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler - mapred.job.reduce.markreset.buffer.percent is not set, set to default 0.3\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:00,315 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler - creating jar file Job4537005419718909074.jar\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:05,356 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler - jar file Job4537005419718909074.jar created\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:05,377 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler - Setting up single store job\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:05,432 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - 1 map-reduce job(s) waiting for submission.\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:05,932 [Thread-6] INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat - Total input paths to process : 1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:05,933 [Thread-6] INFO org.apache.pig.backend.hadoop.executionengine.util.MapRedUtil - Total input paths to process : 1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:05,934 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - 0% complete\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:05,971 [Thread-6] WARN org.apache.hadoop.io.compress.snappy.LoadSnappy - Snappy native library is available\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:05,973 [Thread-6] INFO org.apache.hadoop.util.NativeCodeLoader - Loaded the native-hadoop library\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:05,973 [Thread-6] INFO org.apache.hadoop.io.compress.snappy.LoadSnappy - Snappy native library loaded\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:05,977 [Thread-6] INFO 
org.apache.pig.backend.hadoop.executionengine.util.MapRedUtil - Total input paths (combined) to process : 1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:06,811 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - HadoopJobId: job_201211132238_0002\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:06,812 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - More information at: http://host3:50030/jobdetails.jsp?jobid=job_201211132238_0002\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:17,380 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - 50% complete\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:21,432 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - 100% complete\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:21,434 [main] INFO org.apache.pig.tools.pigstats.SimplePigStats - Script Statistics: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: HadoopVersion\tPigVersion\tUserId\tStartedAt\tFinishedAt\tFeatures\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 1.1.0.1\t0.10.0.1\tambari_qa\t2012-11-13 22:40:00\t2012-11-13 22:40:21\tUNKNOWN\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Success!\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Job Stats (time in seconds):\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: JobId\tMaps\tReduces\tMaxMapTime\tMinMapTIme\tAvgMapTime\tMaxReduceTime\tMinReduceTime\tAvgReduceTime\tAlias\tFeature\tOutputs\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: job_201211132238_0002\t1\t0\t3\t3\t3\t0\t0\t0\tA,B\tMAP_ONLY\thdfs://host5:8020/user/ambari_qa/pigsmoke.out,\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Input(s):\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Successfully read 36 records (2137 bytes) from: \"hdfs://host5:8020/user/ambari_qa/passwd\"\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Output(s):\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Successfully stored 36 records (236 bytes) in: \"hdfs://host5:8020/user/ambari_qa/pigsmoke.out\"\u001B[0m\n\u001B[0;36mnotice: 
/Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Counters:\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Total records written : 36\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Total bytes written : 236\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Spillable Memory Manager spill count : 0\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Total bags proactively spilled: 0\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Total records proactively spilled: 0\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Job DAG:\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: job_201211132238_0002\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:21,446 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - Success!\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Hdp-hadoop::Exec-hadoop[pig::service_check::test]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -test -e pigsmoke.out]/Exec[hadoop --config /etc/hadoop/conf fs -test -e pigsmoke.out]: Triggered 'refresh' from 1 events\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 32.06 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Total records proactively spilled: 0\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: Job DAG:\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: job_201211132238_0002\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: 2012-11-13 22:40:21,446 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - Success!\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Exec[/tmp/pigSmoke.sh]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-pig::Pig::Service_check/Hdp-hadoop::Exec-hadoop[pig::service_check::test]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -test -e pigsmoke.out]/Exec[hadoop --config /etc/hadoop/conf fs -test -e pigsmoke.out]: Triggered 'refresh' from 1 events\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 32.06 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host2",
"id" : 42,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "EXECUTE",
"role" : "PIG_SERVICE_CHECK",
"start_time" : 1352864331815,
"stage_id" : 3
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/36",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}9786ed97b221e37075bdb64400bc804a' to '{md5}8e06d7ec24fe5acd81917162d58857db'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Tasktracker/Hdp-hadoop::Service[tasktracker]/Hdp::Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/mapred-site.xml]/content: content changed '{md5}30e43dbdb225dad740d632ecc8f6ae11' to '{md5}558aadf67e4d29865a6d935076d3868b'\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.20 seconds\u001B[0m\n\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. 
Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}9786ed97b221e37075bdb64400bc804a' to '{md5}8e06d7ec24fe5acd81917162d58857db'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Tasktracker/Hdp-hadoop::Service[tasktracker]/Hdp::Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/mapred-site.xml]/content: content changed '{md5}30e43dbdb225dad740d632ecc8f6ae11' to '{md5}558aadf67e4d29865a6d935076d3868b'\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.20 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host1",
"id" : 36,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "TASKTRACKER",
"start_time" : 1352864269562,
"stage_id" : 2
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/34",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}63d8feab1255e45d9549ccea14f687c4' to '{md5}4673b67e078cc9d84ffc4873e5198edf'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Jobtracker/Hdp-hadoop::Service[jobtracker]/Hdp::Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start jobtracker']/Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start jobtracker']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Jobtracker/Hdp-hadoop::Service[historyserver]/Hdp::Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start historyserver']/Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start historyserver']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/mapred-site.xml]/content: content changed '{md5}cccc03b9f3384eac76957c7fe2f12849' to '{md5}07e946dbf4ae6632034ee6715a085b92'\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 9.76 seconds\u001B[0m\n\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. 
Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}63d8feab1255e45d9549ccea14f687c4' to '{md5}4673b67e078cc9d84ffc4873e5198edf'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Jobtracker/Hdp-hadoop::Service[jobtracker]/Hdp::Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start jobtracker']/Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start jobtracker']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Jobtracker/Hdp-hadoop::Service[historyserver]/Hdp::Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start historyserver']/Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start historyserver']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/mapred-site.xml]/content: content changed '{md5}cccc03b9f3384eac76957c7fe2f12849' to '{md5}07e946dbf4ae6632034ee6715a085b92'\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 9.76 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host3",
"id" : 34,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "JOBTRACKER",
"start_time" : 1352864269447,
"stage_id" : 2
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/28",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /File[/etc/nagios/objects/hadoop-hostgroups.cfg]/content: content changed '{md5}ffff62426c4f7a42c1cb1ca44b324dad' to '{md5}21ad9f95dd93ee39fc87db07b7ea05be'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/nagios/objects/hadoop-hosts.cfg]/content: content changed '{md5}fdcc51e399dd2381778a163933ef2beb' to '{md5}afbfd32db940db5fff4701c964169c27'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b /etc/nagios/htpasswd.users nagiosadmin admin]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Services/Service[nagios]/ensure: ensure changed 'stopped' to 'running'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Services/Service[nagios]: Triggered 'refresh' from 1 events\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 8.78 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /File[/etc/nagios/objects/hadoop-hostgroups.cfg]/content: content changed '{md5}ffff62426c4f7a42c1cb1ca44b324dad' to '{md5}21ad9f95dd93ee39fc87db07b7ea05be'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/nagios/objects/hadoop-hosts.cfg]/content: content changed '{md5}fdcc51e399dd2381778a163933ef2beb' to '{md5}afbfd32db940db5fff4701c964169c27'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b /etc/nagios/htpasswd.users nagiosadmin admin]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Services/Service[nagios]/ensure: ensure changed 'stopped' to 'running'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Services/Service[nagios]: Triggered 'refresh' from 1 events\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 8.78 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host5",
"id" : 28,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "NAGIOS_SERVER",
"start_time" : 1352864089985,
"stage_id" : 1
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/37",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}df2d55356b238461af57fe22ad993e4d' to '{md5}62a467fcccda8169de563170e39e3419'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Tasktracker/Hdp-hadoop::Service[tasktracker]/Hdp::Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/mapred-site.xml]/content: content changed '{md5}1a3769d695902dba39b5645fef3766e0' to '{md5}23097908e8b54f7dbc4d31b5d26d21e7'\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 5.66 seconds\u001B[0m\n\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. 
Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}df2d55356b238461af57fe22ad993e4d' to '{md5}62a467fcccda8169de563170e39e3419'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Tasktracker/Hdp-hadoop::Service[tasktracker]/Hdp::Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/mapred-site.xml]/content: content changed '{md5}1a3769d695902dba39b5645fef3766e0' to '{md5}23097908e8b54f7dbc4d31b5d26d21e7'\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 5.66 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host4",
"id" : 37,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "TASKTRACKER",
"start_time" : 1352864269589,
"stage_id" : 2
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/41",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::create_file]/Hdp::Exec[hadoop --config /etc/hadoop/conf dfs -rmr mapredsmokeoutput mapredsmokeinput ; hadoop dfs -put /etc/passwd mapredsmokeinput ]/Exec[hadoop --config /etc/hadoop/conf dfs -rmr mapredsmokeoutput mapredsmokeinput ; hadoop dfs -put /etc/passwd mapredsmokeinput ]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:13 INFO input.FileInputFormat: Total input paths to process : 1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:13 WARN snappy.LoadSnappy: Snappy native library is available\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:13 INFO util.NativeCodeLoader: Loaded the native-hadoop library\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:13 INFO snappy.LoadSnappy: Snappy native library loaded\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:14 INFO mapred.JobClient: Running job: job_201211132238_0001\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:15 INFO mapred.JobClient: map 0% reduce 0%\u001B[0m\n\u001B[0;36mnotice: 
/Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:30 INFO mapred.JobClient: map 100% reduce 0%\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:38 INFO mapred.JobClient: map 100% reduce 33%\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:40 INFO mapred.JobClient: map 100% reduce 100%\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Job complete: job_201211132238_0001\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Counters: 29\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Job Counters \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Launched reduce tasks=1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar 
wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: SLOTS_MILLIS_MAPS=6106\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Total time spent by all reduces waiting after reserving slots (ms)=0\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Total time spent by all maps waiting after reserving slots (ms)=0\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Launched map tasks=1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Data-local map tasks=1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: SLOTS_MILLIS_REDUCES=9332\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: File Output Format Counters \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Bytes Written=1845\u001B[0m\n\u001B[0;36mnotice: 
/Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: FileSystemCounters\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: FILE_BYTES_READ=2095\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: HDFS_BYTES_READ=1893\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: FILE_BYTES_WRITTEN=117522\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: HDFS_BYTES_WRITTEN=1845\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: File Input Format Counters \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Bytes Read=1755\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar 
/usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Map-Reduce Framework\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Map output materialized bytes=2095\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Map input records=36\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Reduce shuffle bytes=2095\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Spilled Records=122\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Map output bytes=2003\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: CPU time spent (ms)=1920\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Total committed heap usage (bytes)=433913856\u001B[0m\n\u001B[0;36mnotice: 
/Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Combine input records=62\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: SPLIT_RAW_BYTES=138\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Reduce input records=61\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Reduce input groups=61\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Combine output records=61\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Physical memory (bytes) snapshot=381779968\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Reduce output records=61\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar 
/usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Virtual memory (bytes) snapshot=2704003072\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Map output records=62\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::test]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -test -e mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf fs -test -e mapredsmokeoutput]: Triggered 'refresh' from 1 events\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 37.52 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Reduce input records=61\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Reduce input groups=61\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Combine output records=61\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Physical memory (bytes) snapshot=381779968\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount 
mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Reduce output records=61\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Virtual memory (bytes) snapshot=2704003072\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: 12/11/13 22:39:41 INFO mapred.JobClient: Map output records=62\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::run_wordcount]/Hdp::Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf jar /usr/lib/hadoop//hadoop-examples.jar wordcount mapredsmokeinput mapredsmokeoutput]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Mapred::Service_check/Hdp-hadoop::Exec-hadoop[mapred::service_check::test]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -test -e mapredsmokeoutput]/Exec[hadoop --config /etc/hadoop/conf fs -test -e mapredsmokeoutput]: Triggered 'refresh' from 1 events\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 37.52 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host2",
"id" : 41,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "EXECUTE",
"role" : "MAPREDUCE_SERVICE_CHECK",
"start_time" : 1352864331797,
"stage_id" : 3
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/44",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/File[/tmp/oozieSmoke.sh]/ensure: defined content as '{md5}a421efea655810cf298d18d7b5c1ebdd'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Deleted hdfs://host5:8020/user/ambari_qa/examples\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Deleted hdfs://host5:8020/user/ambari_qa/input-data\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Job ID : 0000002-121113223948436-oozie-oozi-W\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Workflow Name : map-reduce-wf\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: App Path : hdfs://host5:8020/user/ambari_qa/examples/apps/map-reduce\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Status : FAILED\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Run : 0\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: User : ambari_qa\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Group : -\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Created : 2012-11-14 03:41\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Started : 2012-11-14 03:41\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Last Modified : 2012-11-14 03:41\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Ended : -\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: CoordAction ID: -\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Actions\u001B[0m\n\u001B[0;36mnotice: 
/Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ID Status Ext ID Ext Status Err Code \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: 0000002-121113223948436-oozie-oozi-W@mr-node FAILED - - EL_ERROR \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Job ID : 0000002-121113223948436-oozie-oozi-W\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Workflow Name : map-reduce-wf\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: App Path : hdfs://host5:8020/user/ambari_qa/examples/apps/map-reduce\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Status : FAILED\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Run : 0\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: User : ambari_qa\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Group : -\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Created : 2012-11-14 03:41\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Started : 2012-11-14 03:41\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Last Modified : 2012-11-14 03:41\u001B[0m\n\u001B[0;36mnotice: 
/Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Ended : -\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: CoordAction ID: -\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Actions\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ID Status Ext ID Ext Status Err Code \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: 0000002-121113223948436-oozie-oozi-W@mr-node FAILED - - EL_ERROR \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: workflow_status=FAILED\u001B[0m\n\u001B[1;35merr: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: change from notrun to 0 failed: sh /tmp/oozieSmoke.sh /etc/oozie/conf /etc/hadoop/conf ambari_qa false /etc/security/keytabs/ambari_qa.headless.keytab EXAMPLE.COM returned 1 instead of one of [0] at /var/lib/ambari-agent/puppet/modules/hdp-oozie/manifests/oozie/service_check.pp:62\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 50.53 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Ended : -\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: CoordAction ID: -\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: Actions\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: 
/Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ID Status Ext ID Ext Status Err Code \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: 0000002-121113223948436-oozie-oozi-W@mr-node FAILED - - EL_ERROR \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: ------------------------------------------------------------------------------------------------------------------------------------\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: workflow_status=FAILED\u001B[0m\n\u001B[1;35merr: /Stage[2]/Hdp-oozie::Oozie::Service_check/Hdp-oozie::Smoke_shell_file[oozieSmoke.sh]/Exec[/tmp/oozieSmoke.sh]/returns: change from notrun to 0 failed: sh /tmp/oozieSmoke.sh /etc/oozie/conf /etc/hadoop/conf ambari_qa false /etc/security/keytabs/ambari_qa.headless.keytab EXAMPLE.COM returned 1 instead of one of [0] at /var/lib/ambari-agent/puppet/modules/hdp-oozie/manifests/oozie/service_check.pp:62\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 50.53 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "None",
"host_name" : "host2",
"id" : 44,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "EXECUTE",
"role" : "OOZIE_SERVICE_CHECK",
"start_time" : 1352864442993,
"stage_id" : 4
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/29",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}d22fc5749dde07b5b5acff255c490e9d' to '{md5}0617b67bc5192f5e44cf98b2fe25eb6f'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/hdfs-site.xml]/content: content changed '{md5}0d021082a9258c648b5259d3af27ff62' to '{md5}39e33160b7f2933a12fc338a81ae9fcd'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/File[/tmp/checkForFormat.sh]/ensure: defined content as '{md5}5dd6bddf910d8ca9f6fefa44e7bbec7e'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: NameNode Dirname = /grid/0/hadoop/hdfs/namenode\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: NameNode Dirname = /grid/1/hadoop/hdfs/namenode\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:57 INFO namenode.NameNode: STARTUP_MSG: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: /************************************************************\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: STARTUP_MSG: Starting NameNode\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: STARTUP_MSG: host = host5/10.118.58.228\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: STARTUP_MSG: args = [-format]\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: STARTUP_MSG: version = 1.1.0.1\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: STARTUP_MSG: build = -r ; compiled by 'jenkins' on Sat Nov 10 18:55:09 PST 2012\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: ************************************************************/\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: Re-format filesystem in /grid/0/hadoop/hdfs/namenode ? (Y or N) Re-format filesystem in /grid/1/hadoop/hdfs/namenode ? 
(Y or N) 12/11/13 22:36:58 INFO util.GSet: VM type = 64-bit\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 INFO util.GSet: 2% max memory = 19.2 MB\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 INFO util.GSet: capacity = 2^21 = 2097152 entries\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 INFO util.GSet: recommended=2097152, actual=2097152\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 INFO namenode.FSNamesystem: fsOwner=hdfs\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 INFO namenode.FSNamesystem: supergroup=supergroup\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 INFO namenode.FSNamesystem: isPermissionEnabled=true\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 INFO namenode.FSNamesystem: dfs.block.invalidate.limit=100\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 WARN namenode.FSNamesystem: The dfs.support.append option is in your configuration, however append is not supported. This configuration option is no longer required to enable sync\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 INFO namenode.FSNamesystem: isAccessTokenEnabled=false accessKeyUpdateInterval=0 min(s), accessTokenLifetime=0 min(s)\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 INFO namenode.NameNode: Caching file names occuring more than 10 times \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:58 INFO common.Storage: Image file of size 110 saved in 0 seconds.\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:59 INFO namenode.FSEditLog: closing edit log: position=4, editlog=/grid/0/hadoop/hdfs/namenode/current/edits\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:59 INFO namenode.FSEditLog: close success: truncate to 4, editlog=/grid/0/hadoop/hdfs/namenode/current/edits\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:59 INFO common.Storage: Storage directory /grid/0/hadoop/hdfs/namenode has been successfully formatted.\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:59 INFO common.Storage: Image file of size 110 saved in 0 seconds.\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:59 INFO namenode.FSEditLog: closing edit log: position=4, editlog=/grid/1/hadoop/hdfs/namenode/current/edits\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:59 INFO namenode.FSEditLog: close success: truncate to 4, 
editlog=/grid/1/hadoop/hdfs/namenode/current/edits\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:59 INFO common.Storage: Storage directory /grid/1/hadoop/hdfs/namenode has been successfully formatted.\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: 12/11/13 22:36:59 INFO namenode.NameNode: SHUTDOWN_MSG: \u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: /************************************************************\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: SHUTDOWN_MSG: Shutting down NameNode at host5/10.118.58.228\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: ************************************************************/\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: yes: standard output: Broken pipe\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: yes: write error\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Exec[/tmp/checkForFormat.sh]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode::Format/Hdp::Exec[set namenode mark]/Exec[set namenode mark]: Triggered 'refresh' from 1 events\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Service[namenode]/Hdp::Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode']/Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/oozie]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/oozie]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/oozie]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/oozie]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/oozie]/Hdp-hadoop::Exec-hadoop[fs -chmod 775 /user/oozie]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chmod 775 /user/oozie]/Exec[hadoop --config /etc/hadoop/conf fs -chmod 775 /user/oozie]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred/system]/Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred/system]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -chown hdfs /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown hdfs /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -chown hdfs /tmp]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -chmod 770 /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chmod 770 /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -chmod 770 /user/ambari_qa]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -chown ambari_qa /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown ambari_qa /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -chown ambari_qa /user/ambari_qa]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -chmod 777 /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chmod 777 /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -chmod 777 /tmp]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/oozie]/Hdp-hadoop::Exec-hadoop[fs -chown oozie /user/oozie]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown oozie /user/oozie]/Exec[hadoop --config /etc/hadoop/conf fs -chown oozie /user/oozie]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 51.23 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/oozie]/Hdp-hadoop::Exec-hadoop[fs -chmod 775 /user/oozie]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chmod 775 /user/oozie]/Exec[hadoop --config /etc/hadoop/conf fs -chmod 775 /user/oozie]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred/system]/Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred/system]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -chown hdfs /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown hdfs /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -chown hdfs /tmp]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -chmod 770 /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chmod 770 /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -chmod 770 /user/ambari_qa]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -chown ambari_qa /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown ambari_qa /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -chown ambari_qa /user/ambari_qa]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -chmod 777 /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chmod 777 /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -chmod 777 /tmp]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/oozie]/Hdp-hadoop::Exec-hadoop[fs -chown oozie /user/oozie]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown oozie /user/oozie]/Exec[hadoop --config /etc/hadoop/conf fs -chown oozie /user/oozie]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 51.23 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host5",
"id" : 29,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "NAMENODE",
"start_time" : 1352864090025,
"stage_id" : 1
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/39",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}db0f2efdd03e4845c0528e1978b25644' to '{md5}84df095b5569e720b4aeaf4a96e0ee6d'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Tasktracker/Hdp-hadoop::Service[tasktracker]/Hdp::Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/mapred-site.xml]/content: content changed '{md5}f72b50738651b3cb6bcef039b59ffdcb' to '{md5}e750ca8f3497b9a4656f782dcf335dab'\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 5.29 seconds\u001B[0m\n\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. 
Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}db0f2efdd03e4845c0528e1978b25644' to '{md5}84df095b5569e720b4aeaf4a96e0ee6d'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Tasktracker/Hdp-hadoop::Service[tasktracker]/Hdp::Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/Exec[su - mapred -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/mapred-site.xml]/content: content changed '{md5}f72b50738651b3cb6bcef039b59ffdcb' to '{md5}e750ca8f3497b9a4656f782dcf335dab'\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 5.29 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host2",
"id" : 39,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "TASKTRACKER",
"start_time" : 1352864269636,
"stage_id" : 2
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/32",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}c892638e8c76c66f072640eb32b0637a' to '{md5}db0f2efdd03e4845c0528e1978b25644'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/hdfs-site.xml]/content: content changed '{md5}fcfc81d25ae7ad5f5aaaacdc3d47f0f5' to '{md5}036cea2c613ff235499a7ed743be467f'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Datanode/Hdp-hadoop::Service[datanode]/Hdp::Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.38 seconds\u001B[0m\n\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/init.pp:134 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:74 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $service_state at /var/lib/ambari-agent/puppet/modules/hdp-hadoop/manifests/service.pp:83 is deprecated. Support will be removed in Puppet 2.8. Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;33mwarning: Dynamic lookup of $ambari_db_server_host is deprecated. Support will be removed in Puppet 2.8. 
Use a fully-qualified variable name (e.g., $classname::variable) or parameterized classes.\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/core-site.xml]/content: content changed '{md5}c892638e8c76c66f072640eb32b0637a' to '{md5}db0f2efdd03e4845c0528e1978b25644'\u001B[0m\n\u001B[0;36mnotice: /File[/etc/hadoop/conf/hdfs-site.xml]/content: content changed '{md5}fcfc81d25ae7ad5f5aaaacdc3d47f0f5' to '{md5}036cea2c613ff235499a7ed743be467f'\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-hadoop::Datanode/Hdp-hadoop::Service[datanode]/Hdp::Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/Exec[su - hdfs -c '/usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode']/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.38 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host2",
"id" : 32,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "DATANODE",
"start_time" : 1352864090145,
"stage_id" : 1
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/25",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Gmond/Hdp::Exec[hdp-gmond service]/Exec[hdp-gmond service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.84 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Gmond/Hdp::Exec[hdp-gmond service]/Exec[hdp-gmond service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.84 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host1",
"id" : 25,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "GANGLIA_MONITOR",
"start_time" : 1352864089770,
"stage_id" : 1
}
},
{
"href" : "http://ambari:8080/api/clusters/mycluster/requests/2/tasks/23",
"Tasks" : {
"exit_code" : 0,
"stdout" : "\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Gmond/Hdp::Exec[hdp-gmond service]/Exec[hdp-gmond service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.77 seconds\u001B[0m\n\n\u001B[0;36mnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Snappy::Package::Ln[32]/Hdp::Exec[hdp::snappy::package::ln 32]/Exec[hdp::snappy::package::ln 32]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPNameNode]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPSlaves]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPHBaseMaster]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Config-gen/Hdp-ganglia::Config::Generate_monitor[HDPJobTracker]/Hdp::Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/Exec[/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: /Stage[2]/Hdp-ganglia::Monitor::Gmond/Hdp::Exec[hdp-gmond service]/Exec[hdp-gmond service]/returns: executed successfully\u001B[0m\n\u001B[0;36mnotice: Finished catalog run in 6.77 seconds\u001B[0m",
"status" : "COMPLETED",
"stderr" : "none",
"host_name" : "host3",
"id" : 23,
"cluster_name" : "mycluster",
"attempt_cnt" : 1,
"request_id" : 2,
"command" : "START",
"role" : "GANGLIA_MONITOR",
"start_time" : 1352864089600,
"stage_id" : 1
}
}
]
}
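
The document above is plain JSON, so any HTTP client can consume it. As a minimal illustration (not part of the response itself), the Python sketch below fetches the individual task resources from the hrefs shown above and prints each task's role, host, and status. The endpoint http://ambari:8080, the admin/admin basic-auth credentials, and the use of the third-party requests library are assumptions for illustration; substitute the values for your own cluster.

import requests

# Base URL and credentials are assumptions for illustration; the cluster
# name and the request/task ids below are the ones in the response above.
BASE = "http://ambari:8080/api/clusters/mycluster"
AUTH = ("admin", "admin")  # hypothetical credentials

def get_task(request_id: int, task_id: int) -> dict:
    """Fetch one task resource, e.g. .../requests/2/tasks/39,
    and return its "Tasks" object."""
    url = f"{BASE}/requests/{request_id}/tasks/{task_id}"
    resp = requests.get(url, auth=AUTH)
    resp.raise_for_status()
    return resp.json()["Tasks"]

if __name__ == "__main__":
    for task_id in (23, 25, 29, 32, 39):  # task ids appearing in this example
        t = get_task(2, task_id)
        print(f'{t["id"]:>3}  {t["role"]:<16} {t["host_name"]:<6} '
              f'{t["status"]} (exit {t["exit_code"]})')

A task finished successfully when, as in every entry above, "status" is "COMPLETED" and "exit_code" is 0; the full agent output is carried in "stdout" and any error text in "stderr".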