<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>chukwa.engine.dsDirectory.rootFolder</name>
<value>/chukwa/repos</value>
<description>Chukwa Data location on HDFS</description>
</property>
<!-- database tables -->
<property>
<name>report.db.name.nodeactivity</name>
<value>node_activity</value>
<description></description>
</property>
<property>
<name>report.db.primary.key.nodeactivity</name>
<value>timestamp</value>
</property>
<property>
<name>metric.nodeactivity.down</name>
<value>down</value>
</property>
<property>
<name>metric.nodeactivity.downmachines</name>
<value>downMachines</value>
</property>
<property>
<name>metric.nodeactivity.free</name>
<value>free</value>
</property>
<property>
<name>metric.nodeactivity.freemachines</name>
<value>freeMachines</value>
</property>
<property>
<name>metric.nodeactivity.used</name>
<value>used</value>
</property>
<property>
<name>metric.nodeactivity.usedmachines</name>
<value>usedMachines</value>
</property>
<property>
<name>report.db.name.hod_job_digest</name>
<value>hod_job_digest</value>
</property>
<property>
<name>report.db.name.cluster_system_metrics</name>
<value>cluster_system_metrics</value>
</property>
<property>
<name>report.db.name.systemmetrics</name>
<value>system_metrics</value>
<description></description>
</property>
<property>
<name>report.db.name.df</name>
<value>disk</value>
</property>
<property>
<name>report.db.name.cluster_disk</name>
<value>cluster_disk</value>
</property>
<property>
<name>report.db.name.hadoop_dfs_namenode</name>
<value>dfs_namenode</value>
</property>
<property>
<name>report.db.name.hadoop_dfs_datanode</name>
<value>dfs_datanode</value>
</property>
<property>
<name>report.db.name.hadoop_dfs_throughput</name>
<value>dfs_throughput</value>
</property>
<property>
<name>report.db.name.hadoop_dfs_fsnamesystem</name>
<value>dfs_fsnamesystem</value>
</property>
<property>
<!-- NOTE(review): this value ("dfs_namenode") duplicates report.db.name.hadoop_dfs_namenode
     above; confirm that fsdirectory metrics are intentionally stored in the dfs_namenode
     table rather than a dedicated dfs_fsdirectory table. -->
<name>report.db.name.hadoop_dfs_fsdirectory</name>
<value>dfs_namenode</value>
</property>
<property>
<name>report.db.name.hadoop_jvm_metrics</name>
<value>hadoop_jvm</value>
</property>
<property>
<name>report.db.name.hadoop_mapred_jobtracker</name>
<value>hadoop_mapred</value>
</property>
<property>
<name>report.db.name.hadoop_rpc_metrics</name>
<value>hadoop_rpc</value>
</property>
<property>
<name>report.db.name.cluster_hadoop_rpc</name>
<value>cluster_hadoop_rpc</value>
</property>
<property>
<name>report.db.name.mssrgraph</name>
<value>mssrgraph</value>
</property>
<property>
<name>report.db.name.mrjobcounters</name>
<value>MRJobCounters</value>
</property>
<property>
<name>report.db.name.hodjob</name>
<value>HodJob</value>
</property>
<property>
<name>report.db.name.hodmachine</name>
<value>hod_machine</value>
</property>
<property>
<name>report.db.name.mrjob</name>
<value>MRJob</value>
</property>
<property>
<name>report.db.name.mrjobts</name>
<value>MRJobTSData</value>
</property>
<property>
<name>report.db.name.hodjobunprocessed</name>
<value>HodJobUnprocessed</value>
</property>
<property>
<name>report.db.name.hodjobdigest</name>
<value>HodJobDigest</value>
</property>
<property>
<name>report.db.name.queueInfo</name>
<value>QueueInfo</value>
</property>
<property>
<name>report.db.name.jobcounters</name>
<value>MRJobCounters</value>
</property>
<property>
<name>report.db.name.user_util</name>
<value>user_util</value>
</property>
<property>
<name>report.db.name.clienttrace</name>
<value>ClientTrace</value>
<description></description>
</property>
<!-- System Metrics Config -->
<property>
<name>report.db.primary.key.systemmetrics</name>
<value>timestamp</value>
</property>
<property>
<name>metric.systemmetrics.csource</name>
<value>host</value>
</property>
<property>
<name>metric.systemmetrics.ldavg-1</name>
<value>load_1</value>
</property>
<property>
<name>metric.systemmetrics.ldavg-5</name>
<value>load_5</value>
</property>
<property>
<name>metric.systemmetrics.ldavg-15</name>
<value>load_15</value>
</property>
<property>
<name>metric.systemmetrics.tasks_total</name>
<value>task_total</value>
</property>
<property>
<name>metric.systemmetrics.tasks_running</name>
<value>task_running</value>
</property>
<property>
<name>metric.systemmetrics.tasks_sleeping</name>
<value>task_sleep</value>
</property>
<property>
<name>metric.systemmetrics.tasks_stopped</name>
<value>task_stopped</value>
</property>
<property>
<name>metric.systemmetrics.tasks_zombie</name>
<value>task_zombie</value>
</property>
<property>
<name>metric.systemmetrics.mem_total</name>
<value>mem_total</value>
</property>
<property>
<name>metric.systemmetrics.mem_buffers</name>
<value>mem_buffers</value>
</property>
<property>
<name>metric.systemmetrics.mem_free</name>
<value>mem_free</value>
</property>
<property>
<name>metric.systemmetrics.mem_used</name>
<value>mem_used</value>
</property>
<property>
<name>metric.systemmetrics.mem_shared</name>
<value>mem_shared</value>
</property>
<property>
<name>metric.systemmetrics.kbcached</name>
<value>mem_cached</value>
</property>
<property>
<name>metric.systemmetrics.eth0.rxerr/s</name>
<value>eth0_rxerrs</value>
</property>
<property>
<name>metric.systemmetrics.eth0.rxbyt/s</name>
<value>eth0_rxbyts</value>
</property>
<property>
<name>metric.systemmetrics.eth0.rxpck/s</name>
<value>eth0_rxpcks</value>
</property>
<property>
<name>metric.systemmetrics.eth0.rxdrop/s</name>
<value>eth0_rxdrops</value>
</property>
<property>
<name>metric.systemmetrics.eth0.txerr/s</name>
<value>eth0_txerrs</value>
</property>
<property>
<name>metric.systemmetrics.eth0.txbyt/s</name>
<value>eth0_txbyts</value>
</property>
<property>
<name>metric.systemmetrics.eth0.txpck/s</name>
<value>eth0_txpcks</value>
</property>
<property>
<name>metric.systemmetrics.eth0.txdrop/s</name>
<value>eth0_txdrops</value>
</property>
<property>
<name>metric.systemmetrics.eth1.rxerr/s</name>
<value>eth1_rxerrs</value>
</property>
<property>
<name>metric.systemmetrics.eth1.rxbyt/s</name>
<value>eth1_rxbyts</value>
</property>
<property>
<name>metric.systemmetrics.eth1.rxpck/s</name>
<value>eth1_rxpcks</value>
</property>
<property>
<name>metric.systemmetrics.eth1.rxdrop/s</name>
<value>eth1_rxdrops</value>
</property>
<property>
<name>metric.systemmetrics.eth1.txerr/s</name>
<value>eth1_txerrs</value>
</property>
<property>
<name>metric.systemmetrics.eth1.txbyt/s</name>
<value>eth1_txbyts</value>
</property>
<property>
<name>metric.systemmetrics.eth1.txpck/s</name>
<value>eth1_txpcks</value>
</property>
<property>
<name>metric.systemmetrics.eth1.txdrop/s</name>
<value>eth1_txdrops</value>
</property>
<property>
<name>metric.systemmetrics.sda.rkb/s</name>
<value>sda_rkbs</value>
</property>
<property>
<name>metric.systemmetrics.sda.wkb/s</name>
<value>sda_wkbs</value>
</property>
<property>
<name>metric.systemmetrics.sdb.rkb/s</name>
<value>sdb_rkbs</value>
</property>
<property>
<name>metric.systemmetrics.sdb.wkb/s</name>
<value>sdb_wkbs</value>
</property>
<property>
<name>metric.systemmetrics.sdc.rkb/s</name>
<value>sdc_rkbs</value>
</property>
<property>
<name>metric.systemmetrics.sdc.wkb/s</name>
<value>sdc_wkbs</value>
</property>
<property>
<name>metric.systemmetrics.sdd.rkb/s</name>
<value>sdd_rkbs</value>
</property>
<property>
<name>metric.systemmetrics.sdd.wkb/s</name>
<value>sdd_wkbs</value>
</property>
<property>
<name>metric.systemmetrics.%idle</name>
<value>cpu_idle_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.%nice</name>
<value>cpu_nice_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.%sys</name>
<value>cpu_system_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.%system</name>
<value>cpu_system_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.%user</name>
<value>cpu_user_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.cpu_hi%</name>
<value>cpu_hirq_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.cpu_si%</name>
<value>cpu_sirq_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.%iowait</name>
<value>iowait_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.mem_buffers_pcnt</name>
<value>mem_buffers_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.mem_cached_pcnt</name>
<value>mem_cached_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.eth0_busy_pcnt</name>
<value>eth0_busy_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.eth1_busy_pcnt</name>
<value>eth1_busy_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.sda.%util</name>
<value>sda_busy_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.sdb.%util</name>
<value>sdb_busy_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.sdc.%util</name>
<value>sdc_busy_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.sdd.%util</name>
<value>sdd_busy_pcnt</value>
</property>
<property>
<name>metric.systemmetrics.swap_used_pcnt</name>
<value>swap_used_pcnt</value>
</property>
<property>
<name>report.db.primary.key.df</name>
<value>timestamp</value>
</property>
<property>
<name>metric.df.available</name>
<value>available</value>
</property>
<property>
<name>metric.df.used</name>
<value>used</value>
</property>
<property>
<name>metric.df.use%</name>
<value>used_percent</value>
</property>
<property>
<name>metric.df.mounted-on</name>
<value>mount</value>
</property>
<property>
<name>metric.df.filesystem</name>
<value>fs</value>
</property>
<property>
<name>metric.df.csource</name>
<value>host</value>
</property>
<!-- clienttrace -->
<property>
<name>report.db.primary.key.clienttrace</name>
<value>timestamp</value>
</property>
<property>
<name>metric.clienttrace.local_hdfs_read</name>
<value>local_hdfs_read</value>
</property>
<property>
<name>metric.clienttrace.intra_rack_hdfs_read</name>
<value>intra_rack_hdfs_read</value>
</property>
<property>
<name>metric.clienttrace.inter_rack_hdfs_read</name>
<value>inter_rack_hdfs_read</value>
</property>
<property>
<name>metric.clienttrace.local_hdfs_write</name>
<value>local_hdfs_write</value>
</property>
<property>
<name>metric.clienttrace.intra_rack_hdfs_write</name>
<value>intra_rack_hdfs_write</value>
</property>
<property>
<name>metric.clienttrace.inter_rack_hdfs_write</name>
<value>inter_rack_hdfs_write</value>
</property>
<property>
<name>metric.clienttrace.local_mapred_shuffle</name>
<value>local_mapred_shuffle</value>
</property>
<property>
<name>metric.clienttrace.intra_rack_mapred_shuffle</name>
<value>intra_rack_mapred_shuffle</value>
</property>
<property>
<name>metric.clienttrace.inter_rack_mapred_shuffle</name>
<value>inter_rack_mapred_shuffle</value>
</property>
<!-- dfs name node metrics -->
<property>
<name>report.db.primary.key.hadoop_dfs_namenode</name>
<value>timestamp</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.csource</name>
<value>host</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.addblockops</name>
<value>add_block_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.blockscorrupted</name>
<value>blocks_corrupted</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.createfileops</name>
<value>create_file_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.deletefileops</name>
<value>delete_file_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.filescreated</name>
<value>files_created</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.filesrenamed</name>
<value>files_renamed</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.getblocklocations</name>
<value>get_block_locations</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.getlistingops</name>
<value>get_listing_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.safemodetime</name>
<value>safe_mode_time</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.syncs_avg_time</name>
<value>syncs_avg_time</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.syncs_num_ops</name>
<value>syncs_num_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.transactions_avg_time</name>
<value>transactions_avg_time</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.transactions_num_ops</name>
<value>transactions_num_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.blockreport_avg_time</name>
<value>block_report_avg_time</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.blockreport_num_ops</name>
<value>block_report_num_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_namenode.fsimageloadtime</name>
<value>fs_image_load_time</value>
</property>
<!-- dfs data node -->
<property>
<name>report.db.primary.key.hadoop_dfs_datanode</name>
<value>timestamp</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.hostname</name>
<value>host</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.blockreports_avg_time</name>
<value>block_reports_avg_time</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.blockreports_num_ops</name>
<value>block_reports_num_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.block_verification_failures</name>
<value>block_verification_failures</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.blocks_read</name>
<value>blocks_read</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.blocks_removed</name>
<value>blocks_removed</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.blocks_replicated</name>
<value>blocks_replicated</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.blocks_verified</name>
<value>blocks_verified</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.blocks_written</name>
<value>blocks_written</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.bytes_read</name>
<value>bytes_read</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.bytes_written</name>
<value>bytes_written</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.copyblockop_avg_time</name>
<value>copy_block_op_avg_time</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.copyblockop_num_ops</name>
<value>copy_block_op_num_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.heartbeats_avg_time</name>
<value>heart_beats_avg_time</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.heartbeats_num_ops</name>
<value>heart_beats_num_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.readblockop_avg_time</name>
<value>read_block_op_avg_time</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.readblockop_num_ops</name>
<value>read_block_op_num_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.readmetadataop_avg_time</name>
<value>read_metadata_op_avg_time</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.readmetadataop_num_ops</name>
<value>read_metadata_op_num_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.reads_from_local_client</name>
<value>reads_from_local_client</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.reads_from_remote_client</name>
<value>reads_from_remote_client</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.replaceblockop_avg_time</name>
<value>replace_block_op_avg_time</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.replaceblockop_num_ops</name>
<value>replace_block_op_num_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.writeblockop_avg_time</name>
<value>write_block_op_avg_time</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.writeblockop_num_ops</name>
<value>write_block_op_num_ops</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.writes_from_local_client</name>
<value>writes_from_local_client</value>
</property>
<property>
<name>metric.hadoop_dfs_datanode.writes_from_remote_client</name>
<value>writes_from_remote_client</value>
</property>
<!-- dfs fs name system status -->
<property>
<name>report.db.primary.key.hadoop_dfs_fsnamesystem</name>
<value>timestamp</value>
</property>
<property>
<name>metric.hadoop_dfs_fsnamesystem.csource</name>
<value>host</value>
</property>
<property>
<name>metric.hadoop_dfs_fsnamesystem.blockstotal</name>
<value>blocks_total</value>
</property>
<property>
<name>metric.hadoop_dfs_fsnamesystem.capacityremaininggb</name>
<value>capacity_remaining_gb</value>
</property>
<property>
<name>metric.hadoop_dfs_fsnamesystem.capacitytotalgb</name>
<value>capacity_total_gb</value>
</property>
<property>
<name>metric.hadoop_dfs_fsnamesystem.capacityusedgb</name>
<value>capacity_used_gb</value>
</property>
<property>
<name>metric.hadoop_dfs_fsnamesystem.filestotal</name>
<value>files_total</value>
</property>
<property>
<name>metric.hadoop_dfs_fsnamesystem.pendingreplicationblocks</name>
<value>pending_replication_blocks</value>
</property>
<property>
<name>metric.hadoop_dfs_fsnamesystem.scheduledreplicationblocks</name>
<value>scheduled_replication_blocks</value>
</property>
<property>
<name>metric.hadoop_dfs_fsnamesystem.totalload</name>
<value>total_load</value>
</property>
<property>
<name>metric.hadoop_dfs_fsnamesystem.underreplicatedblocks</name>
<value>under_replicated_blocks</value>
</property>
<!-- dfs fsdirectory metrics -->
<property>
<name>report.db.primary.key.hadoop_dfs_fsdirectory</name>
<value>timestamp</value>
</property>
<property>
<name>metric.hadoop_dfs_fsdirectory.csource</name>
<value>host</value>
</property>
<property>
<name>metric.hadoop_dfs_fsdirectory.files_deleted</name>
<value>files_deleted</value>
</property>
<!-- hadoop jvm metrics -->
<property>
<name>report.db.primary.key.hadoop_jvm_metrics</name>
<value>timestamp</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.csource</name>
<value>host</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.gctimemillis</name>
<value>gc_timemillis</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.gccount</name>
<value>gc_count</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.logerror</name>
<value>log_error</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.logfatal</name>
<value>log_fatal</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.loginfo</name>
<value>log_info</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.logwarn</name>
<value>log_warn</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.memheapcommittedm</name>
<value>mem_heap_committed_m</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.memheapusedm</name>
<value>mem_heap_used_m</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.memnonheapcommittedm</name>
<value>mem_non_heap_committed_m</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.memnonheapusedm</name>
<value>mem_non_heap_used_m</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.processname</name>
<value>process_name</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.threadsblocked</name>
<value>threads_blocked</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.threadsnew</name>
<value>threads_new</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.threadsrunnable</name>
<value>threads_runnable</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.threadsterminated</name>
<value>threads_terminated</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.threadstimedwaiting</name>
<value>threads_timed_waiting</value>
</property>
<property>
<name>metric.hadoop_jvm_metrics.threadswaiting</name>
<value>threads_waiting</value>
</property>
<!-- hadoop map/reduce metrics -->
<property>
<name>report.db.primary.key.hadoop_mapred_jobtracker</name>
<value>timestamp</value>
</property>
<property>
<name>metric.hadoop_mapred_jobtracker.csource</name>
<value>host</value>
</property>
<property>
<name>metric.hadoop_mapred_jobtracker.jobs_completed</name>
<value>jobs_completed</value>
</property>
<property>
<name>metric.hadoop_mapred_jobtracker.jobs_submitted</name>
<value>jobs_submitted</value>
</property>
<property>
<name>metric.hadoop_mapred_jobtracker.maps_completed</name>
<value>maps_completed</value>
</property>
<property>
<name>metric.hadoop_mapred_jobtracker.maps_launched</name>
<value>maps_launched</value>
</property>
<property>
<name>metric.hadoop_mapred_jobtracker.reduces_completed</name>
<value>reduces_completed</value>
</property>
<property>
<name>metric.hadoop_mapred_jobtracker.reduces_launched</name>
<value>reduces_launched</value>
</property>
<!-- hadoop rpc metrics -->
<property>
<name>report.db.primary.key.hadoop_rpc_metrics</name>
<value>timestamp</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.csource</name>
<value>host</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.rpcprocessingtime_avg_time</name>
<value>rpc_processing_time_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.rpcprocessingtime_num_ops</name>
<value>rpc_processing_time_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getbuildversion_avg_time</name>
<value>get_build_version_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getbuildversion_num_ops</name>
<value>get_build_version_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getjobcounters_avg_time</name>
<value>get_job_counters_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getjobcounters_num_ops</name>
<value>get_job_counters_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getjobprofile_avg_time</name>
<value>get_job_profile_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getjobprofile_num_ops</name>
<value>get_job_profile_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getjobstatus_avg_time</name>
<value>get_job_status_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getjobstatus_num_ops</name>
<value>get_job_status_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getnewjobid_avg_time</name>
<value>get_new_job_id_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getnewjobid_num_ops</name>
<value>get_new_job_id_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getprotocolversion_avg_time</name>
<value>get_protocol_version_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getprotocolversion_num_ops</name>
<value>get_protocol_version_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getsystemdir_avg_time</name>
<value>get_system_dir_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.getsystemdir_num_ops</name>
<value>get_system_dir_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.gettaskcompletionevents_avg_time</name>
<value>get_task_completion_events_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.gettaskcompletionevents_num_ops</name>
<value>get_task_completion_events_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.gettaskdiagnostics_avg_time</name>
<value>get_task_diagnostics_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.gettaskdiagnostics_num_ops</name>
<value>get_task_diagnostics_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.heartbeat_avg_time</name>
<value>heartbeat_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.heartbeat_num_ops</name>
<value>heartbeat_num_ops</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.submitjob_avg_time</name>
<value>submit_job_avg_time</value>
</property>
<property>
<name>metric.hadoop_rpc_metrics.submitjob_num_ops</name>
<value>submit_job_num_ops</value>
</property>
<!-- Hod Machine -->
<property>
<name>metric.hodmachine.machine</name>
<value>host</value>
</property>
<property>
<name>metric.hodmachine.hodid</name>
<value>hodid</value>
</property>
<!-- Hod Job -->
<property>
<name>metric.hodjob.hodid</name>
<value>HodID</value>
</property>
<property>
<name>metric.hodjob.userid</name>
<value>UserID</value>
</property>
<property>
<name>metric.hodjob.status</name>
<value>Status</value>
</property>
<property>
<name>metric.hodjob.timequeued</name>
<value>TimeQueued</value>
</property>
<property>
<name>metric.hodjob.starttime</name>
<value>StartTime</value>
</property>
<property>
<name>metric.hodjob.endtime</name>
<value>EndTime</value>
</property>
<property>
<name>metric.hodjob.numofmachines</name>
<value>NumOfMachines</value>
</property>
<!-- Map Reduce Job Counters -->
<property>
<name>report.db.primary.key.mrjobcounters</name>
<value>timestamp</value>
</property>
<property>
<name>metric.mrjobcounters.file_systems_hdfs_bytes_read</name>
<value>hdfs_bytes_read</value>
</property>
<property>
<name>metric.mrjobcounters.file_systems_hdfs_bytes_written</name>
<value>hdfs_bytes_written</value>
</property>
<property>
<name>metric.mrjobcounters.file_systems_local_bytes_read</name>
<value>local_bytes_read</value>
</property>
<property>
<name>metric.mrjobcounters.file_systems_local_bytes_written</name>
<value>local_bytes_written</value>
</property>
<property>
<name>metric.mrjobcounters.job_counters__data-local_map_tasks</name>
<value>data_local_map_tasks</value>
</property>
<property>
<name>metric.mrjobcounters.job_counters__launched_map_tasks</name>
<value>launched_map_tasks</value>
</property>
<property>
<name>metric.mrjobcounters.job_counters__launched_reduce_tasks</name>
<value>launched_reduce_tasks</value>
</property>
<property>
<name>metric.mrjobcounters.jobid</name>
<value>job_id</value>
</property>
<property>
<name>metric.mrjobcounters.map-reduce_framework_combine_input_records</name>
<value>combine_input_records</value>
</property>
<property>
<name>metric.mrjobcounters.map-reduce_framework_combine_output_records</name>
<value>combine_output_records</value>
</property>
<property>
<name>metric.mrjobcounters.map-reduce_framework_map_input_bytes</name>
<value>map_input_bytes</value>
</property>
<property>
<name>metric.mrjobcounters.map-reduce_framework_map_output_bytes</name>
<value>map_output_bytes</value>
</property>
<property>
<name>metric.mrjobcounters.map-reduce_framework_map_input_records</name>
<value>map_input_records</value>
</property>
<property>
<name>metric.mrjobcounters.map-reduce_framework_map_output_records</name>
<value>map_output_records</value>
</property>
<property>
<name>metric.mrjobcounters.map-reduce_framework_reduce_input_groups</name>
<value>reduce_input_groups</value>
</property>
<property>
<name>metric.mrjobcounters.map-reduce_framework_reduce_input_records</name>
<value>reduce_input_records</value>
</property>
<property>
<name>metric.mrjobcounters.map-reduce_framework_reduce_output_records</name>
<value>reduce_output_records</value>
</property>
<!-- Database summarization intervals -->
<property>
<name>consolidator.table.dfs_namenode</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.dfs_datanode</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.hadoop_rpc</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.cluster_hadoop_rpc</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.hadoop_mapred</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.hadoop_jvm</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.system_metrics</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.dfs_throughput</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.node_activity</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.dfs_fsnamesystem</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.disk</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.cluster_disk</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.cluster_system_metrics</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.hod_job_digest</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.hod_machine</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.HodJob</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.user_util</name>
<value>5,30,180,720</value>
</property>
<property>
<name>report.db.name.hdfsusage</name>
<value>hdfs_usage</value>
<description></description>
</property>
<property>
<name>metric.hdfsusage.user</name>
<value>user</value>
</property>
<property>
<name>metric.hdfsusage.bytes</name>
<value>bytes</value>
</property>
<property>
<name>metric.hdfsusage.timestamp</name>
<value>timestamp</value>
</property>
<property>
<name>consolidator.table.hdfs_usage</name>
<value>5,30,180,720</value>
</property>
<!-- start mapping for jobdata -->
<property>
<name>report.db.name.jobdata</name>
<value>mr_job</value>
<description></description>
</property>
<property>
<name>consolidator.table.mr_job</name>
<value>5,30,180,720</value>
</property>
<property>
<name>metric.jobdata.jobid</name>
<value>job_id</value>
</property>
<property>
<name>metric.jobdata.user</name>
<value>user</value>
</property>
<property>
<name>metric.jobdata.mapred.job.queue.name</name>
<value>queue</value>
</property>
<property>
<name>metric.jobdata.job_final_status</name>
<value>status</value>
</property>
<property>
<name>metric.jobdata.submit_time</name>
<value>submit_time</value>
</property>
<property>
<name>metric.jobdata.launch_time</name>
<value>launch_time</value>
</property>
<property>
<name>metric.jobdata.finish_time</name>
<value>finish_time</value>
</property>
<property>
<name>metric.jobdata.jobconf-json</name>
<value>jobconf</value>
</property>
<property>
<name>metric.jobdata.jobconf</name>
<value>_delete</value>
<description>Explicitly undefined jobconf path to load to database</description>
</property>
<property>
<name>metric.jobdata.finished_maps</name>
<value>finished_maps</value>
</property>
<property>
<name>metric.jobdata.finished_reduces</name>
<value>finished_reduces</value>
</property>
<property>
<name>metric.jobdata.failed_maps</name>
<value>failed_maps</value>
</property>
<property>
<name>metric.jobdata.failed_reduces</name>
<value>failed_reduces</value>
</property>
<property>
<name>metric.jobdata.total_maps</name>
<value>total_maps</value>
</property>
<property>
<name>metric.jobdata.total_reduces</name>
<value>total_reduces</value>
</property>
<property>
<name>metric.jobdata.reduce_shuffle_bytes</name>
<value>reduce_shuffle_bytes</value>
</property>
<!-- for hadoop 0.20 -->
<property>
<name>metric.jobdata.counter:filesystemcounters:hdfs_bytes_read</name>
<value>hdfs_bytes_read</value>
</property>
<property>
<name>metric.jobdata.counter:filesystemcounters:hdfs_bytes_written</name>
<value>hdfs_bytes_written</value>
</property>
<property>
<name>metric.jobdata.counter:filesystemcounters:file_bytes_read</name>
<value>local_bytes_read</value>
</property>
<property>
<name>metric.jobdata.counter:filesystemcounters:file_bytes_written</name>
<value>local_bytes_written</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.jobinprogress$counter:total_launched_maps</name>
<value>launched_map_tasks</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.jobinprogress$counter:total_launched_reduces</name>
<value>launched_reduce_tasks</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.jobinprogress$counter:data_local_maps</name>
<value>data_local_map_tasks</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.jobinprogress$counter:data_local_reduces</name>
<value>data_local_reduce_tasks</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:map_input_bytes</name>
<value>map_input_bytes</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:map_output_bytes</name>
<value>map_output_bytes</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:map_input_records</name>
<value>map_input_records</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:map_output_records</name>
<value>map_output_records</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:combine_input_records</name>
<value>combine_input_records</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:combine_output_records</name>
<value>combine_output_records</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:spilled_records</name>
<value>spilled_records</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:reduce_input_groups</name>
<value>reduce_input_groups</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:reduce_output_groups</name>
<value>reduce_output_groups</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:reduce_input_records</name>
<value>reduce_input_records</value>
</property>
<property>
<name>metric.jobdata.counter:org.apache.hadoop.mapred.task$counter:reduce_output_records</name>
<value>reduce_output_records</value>
</property>
<!-- end for hadoop 0.20 -->
<!-- for hadoop 0.18 -->
<property>
<name>metric.jobdata.counter:hadoop18:file-systems.hdfs-bytes-read</name>
<value>hdfs_bytes_read</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:file-systems.hdfs-bytes-written</name>
<value>hdfs_bytes_written</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:file-systems.local-bytes-read</name>
<value>local_bytes_read</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:file-systems.local-bytes-written</name>
<value>local_bytes_written</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:job-counters-.launched-map-tasks</name>
<value>launched_map_tasks</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:job-counters-.launched-reduce-tasks</name>
<value>launched_reduce_tasks</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:job-counters-.data-local-map-tasks</name>
<value>data_local_map_tasks</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:job-counters-.data-local-reduce-tasks</name>
<value>data_local_reduce_tasks</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.map-input-bytes</name>
<value>map_input_bytes</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.map-output-bytes</name>
<value>map_output_bytes</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.map-input-records</name>
<value>map_input_records</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.map-output-records</name>
<value>map_output_records</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.combine-input-records</name>
<value>combine_input_records</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.combine-output-records</name>
<value>combine_output_records</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.spilled-records</name>
<value>spilled_records</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.reduce-input-groups</name>
<value>reduce_input_groups</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.reduce-output-groups</name>
<value>reduce_output_groups</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.reduce-input-records</name>
<value>reduce_input_records</value>
</property>
<property>
<name>metric.jobdata.counter:hadoop18:map-reduce-framework.reduce-output-records</name>
<value>reduce_output_records</value>
</property>
<!-- end for hadoop 0.18 -->
<!-- end mapping for jobdata -->
<!-- start mapping for taskdata -->
<property>
<name>report.db.name.taskdata</name>
<value>mr_task</value>
<description></description>
</property>
<property>
<name>consolidator.table.mr_task</name>
<value>5,30,180,720</value>
</property>
<property>
<name>metric.taskdata.jobid</name>
<value>job_id</value>
</property>
<property>
<name>metric.taskdata.task_attempt_id</name>
<value>task_id</value>
</property>
<property>
<name>metric.taskdata.start_time</name>
<value>start_time</value>
</property>
<property>
<name>metric.taskdata.finish_time</name>
<value>finish_time</value>
</property>
<property>
<name>metric.taskdata.task_status</name>
<value>status</value>
</property>
<property>
<name>metric.taskdata.task_attempt_times</name>
<value>attempts</value>
</property>
<property>
<name>metric.taskdata.hostname</name>
<value>hostname</value>
</property>
<property>
<name>metric.taskdata.shuffle_finished</name>
<value>shuffle_finished</value>
</property>
<property>
<name>metric.taskdata.sort_finished</name>
<value>sort_finished</value>
</property>
<!-- NOTE(review): "spilts" looks like a typo for "splits", but the key and value
     must match the metric name emitted upstream and the mr_task column name;
     verify both before renaming, otherwise the mapping will silently break. -->
<property>
<name>metric.taskdata.spilts</name>
<value>spilts</value>
</property>
<property>
<name>metric.taskdata.task_type</name>
<value>type</value>
</property>
<!-- for hadoop 0.20 -->
<property>
<name>metric.taskdata.counter:filesystemcounters:hdfs_bytes_read</name>
<value>hdfs_bytes_read</value>
</property>
<property>
<name>metric.taskdata.counter:filesystemcounters:hdfs_bytes_written</name>
<value>hdfs_bytes_written</value>
</property>
<property>
<name>metric.taskdata.counter:filesystemcounters:file_bytes_read</name>
<value>local_bytes_read</value>
</property>
<property>
<name>metric.taskdata.counter:filesystemcounters:file_bytes_written</name>
<value>local_bytes_written</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:map_input_bytes</name>
<value>map_input_bytes</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:map_output_bytes</name>
<value>map_output_bytes</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:map_input_records</name>
<value>map_input_records</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:map_output_records</name>
<value>map_output_records</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:combine_input_records</name>
<value>combine_input_records</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:combine_output_records</name>
<value>combine_output_records</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:spilled_records</name>
<value>spilled_records</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:reduce_input_groups</name>
<value>reduce_input_groups</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:reduce_output_groups</name>
<value>reduce_output_groups</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:reduce_input_bytes</name>
<value>reduce_input_bytes</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:reduce_output_bytes</name>
<value>reduce_output_bytes</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:reduce_input_records</name>
<value>reduce_input_records</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:reduce_output_records</name>
<value>reduce_output_records</value>
</property>
<property>
<name>metric.taskdata.counter:org.apache.hadoop.mapred.task$counter:reduce_shuffle_bytes</name>
<value>reduce_shuffle_bytes</value>
</property>
<!-- end for hadoop 0.20 -->
<!-- for hadoop 0.18 -->
<property>
<name>metric.taskdata.counter:hadoop18:file-systems.hdfs-bytes-read</name>
<value>hdfs_bytes_read</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:file-systems.hdfs-bytes-written</name>
<value>hdfs_bytes_written</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:file-systems.local-bytes-read</name>
<value>local_bytes_read</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:file-systems.local-bytes-written</name>
<value>local_bytes_written</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.map-input-bytes</name>
<value>map_input_bytes</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.map-output-bytes</name>
<value>map_output_bytes</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.map-input-records</name>
<value>map_input_records</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.map-output-records</name>
<value>map_output_records</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.combine-input-records</name>
<value>combine_input_records</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.combine-output-records</name>
<value>combine_output_records</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.spilled-records</name>
<value>spilled_records</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.reduce-input-groups</name>
<value>reduce_input_groups</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.reduce-output-groups</name>
<value>reduce_output_groups</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.reduce-input-records</name>
<value>reduce_input_records</value>
</property>
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.reduce-output-records</name>
<value>reduce_output_records</value>
</property>
<!-- NOTE(review): this key uses underscores (reduce_shuffle_bytes) while every
     other hadoop18 counter key in this file uses hyphens (e.g.
     map-reduce-framework.reduce-output-records); confirm against the actual
     Hadoop 0.18 counter name before correcting. -->
<property>
<name>metric.taskdata.counter:hadoop18:map-reduce-framework.reduce_shuffle_bytes</name>
<value>reduce_shuffle_bytes</value>
</property>
<!-- end for hadoop 0.18 -->
<property>
<name>report.db.name.hdfsusage</name>
<value>hdfs_usage</value>
<description></description>
</property>
<property>
<name>metric.hdfsusage.user</name>
<value>user</value>
</property>
<property>
<name>metric.hdfsusage.bytes</name>
<value>bytes</value>
</property>
<property>
<name>metric.hdfsusage.timestamp</name>
<value>timestamp</value>
</property>
<property>
<name>consolidator.table.hdfs_usage</name>
<value>5,30,180,720</value>
</property>
<property>
<name>consolidator.table.ClientTrace</name>
<value>5,30,180,720</value>
</property>
<property>
<name>report.db.name.util</name>
<value>util</value>
<description></description>
</property>
<property>
<name>consolidator.table.util</name>
<value>5,30,180,720</value>
</property>
<!-- for agent metrics chunkqueue -->
<property><name>report.db.name.chunkqueue</name><value>chunkqueue</value></property>
<property><name>consolidator.table.chunkqueue</name><value>5,30,180,720</value></property>
<property><name>metric.chunkqueue.chukwa_timestamp</name><value>chukwa_timestamp</value></property>
<property><name>metric.chunkqueue.recordname</name><value>recordname</value></property>
<property><name>metric.chunkqueue.hostname</name><value>hostname</value></property>
<property><name>metric.chunkqueue.contextname</name><value>contextname</value></property>
<property><name>metric.chunkqueue.removedchunk</name><value>removedchunk</value></property>
<property><name>metric.chunkqueue.queuesize</name><value>queuesize</value></property>
<property><name>metric.chunkqueue.removedchunk_raw</name><value>removedchunk_raw</value></property>
<property><name>metric.chunkqueue.datasize</name><value>datasize</value></property>
<property><name>metric.chunkqueue.fullqueue</name><value>fullqueue</value></property>
<property><name>metric.chunkqueue.addedchunk_rate</name><value>addedchunk_rate</value></property>
<property><name>metric.chunkqueue.addedchunk_raw</name><value>addedchunk_raw</value></property>
<property><name>metric.chunkqueue.period</name><value>period</value></property>
<property><name>metric.chunkqueue.addedchunk</name><value>addedchunk</value></property>
<property><name>metric.chunkqueue.removedchunk_rate</name><value>removedchunk_rate</value></property>
<!-- for agent metrics chukwaagent -->
<property><name>report.db.name.chukwaagent</name><value>chukwaagent</value></property>
<property><name>consolidator.table.chukwaagent</name><value>5,30,180,720</value></property>
<property><name>metric.chukwaagent.chukwa_timestamp</name><value>chukwa_timestamp</value></property>
<property><name>metric.chukwaagent.recordname</name><value>recordname</value></property>
<property><name>metric.chukwaagent.hostname</name><value>hostname</value></property>
<property><name>metric.chukwaagent.contextname</name><value>contextname</value></property>
<property><name>metric.chukwaagent.addedadaptor_rate</name><value>addedadaptor_rate</value></property>
<property><name>metric.chukwaagent.addedadaptor_raw</name><value>addedadaptor_raw</value></property>
<property><name>metric.chukwaagent.removedadaptor_rate</name><value>removedadaptor_rate</value></property>
<property><name>metric.chukwaagent.removedadaptor</name><value>removedadaptor</value></property>
<property><name>metric.chukwaagent.period</name><value>period</value></property>
<property><name>metric.chukwaagent.adaptorcount</name><value>adaptorcount</value></property>
<property><name>metric.chukwaagent.removedadaptor_raw</name><value>removedadaptor_raw</value></property>
<property><name>metric.chukwaagent.process</name><value>process</value></property>
<property><name>metric.chukwaagent.addedadaptor</name><value>addedadaptor</value></property>
<!-- for agent metrics chukwahttpsender -->
<property><name>report.db.name.chukwahttpsender</name><value>chukwahttpsender</value></property>
<property><name>consolidator.table.chukwahttpsender</name><value>5,30,180,720</value></property>
<property><name>metric.chukwahttpsender.chukwa_timestamp</name><value>chukwa_timestamp</value></property>
<property><name>metric.chukwahttpsender.recordname</name><value>recordname</value></property>
<property><name>metric.chukwahttpsender.hostname</name><value>hostname</value></property>
<property><name>metric.chukwahttpsender.contextname</name><value>contextname</value></property>
<property><name>metric.chukwahttpsender.httppost_rate</name><value>httppost_rate</value></property>
<property><name>metric.chukwahttpsender.httpthrowable_raw</name><value>httpthrowable_raw</value></property>
<property><name>metric.chukwahttpsender.httpexception_rate</name><value>httpexception_rate</value></property>
<property><name>metric.chukwahttpsender.httpthrowable</name><value>httpthrowable</value></property>
<property><name>metric.chukwahttpsender.httpthrowable_rate</name><value>httpthrowable_rate</value></property>
<property><name>metric.chukwahttpsender.collectorrollover_rate</name><value>collectorrollover_rate</value></property>
<property><name>metric.chukwahttpsender.httppost_raw</name><value>httppost_raw</value></property>
<property><name>metric.chukwahttpsender.period</name><value>period</value></property>
<property><name>metric.chukwahttpsender.httpexception_raw</name><value>httpexception_raw</value></property>
<property><name>metric.chukwahttpsender.httppost</name><value>httppost</value></property>
<property><name>metric.chukwahttpsender.httptimeoutexception</name><value>httptimeoutexception</value></property>
<property><name>metric.chukwahttpsender.httptimeoutexception_raw</name><value>httptimeoutexception_raw</value></property>
<property><name>metric.chukwahttpsender.collectorrollover_raw</name><value>collectorrollover_raw</value></property>
<property><name>metric.chukwahttpsender.collectorrollover</name><value>collectorrollover</value></property>
<property><name>metric.chukwahttpsender.httptimeoutexception_rate</name><value>httptimeoutexception_rate</value></property>
<property><name>metric.chukwahttpsender.httpexception</name><value>httpexception</value></property>
<!-- for job conf parameters -->
<property><name>report.db.name.jobconfdata</name><value>mr_job_conf</value></property>
<property><name>consolidator.table.mr_job_conf</name><value>5,30,180,720</value></property>
<property><name>metric.jobconfdata.jobid</name><value>job_id</value></property>
<property><name>metric.jobconfdata.job_conf.mapred.output.key.class</name><value>mr_output_key_cls</value></property>
<property><name>metric.jobconfdata.job_conf.mapred.map.runner.class</name><value>mr_runner_cls</value></property>
<property><name>metric.jobconfdata.job_conf.mapred.output.value.class</name><value>mr_output_value_cls</value></property>
<property><name>metric.jobconfdata.job_conf.mapred.input.format.class</name><value>mr_input_fmt_cls</value></property>
<property><name>metric.jobconfdata.job_conf.mapred.output.format.class</name><value>mr_output_fmt_cls</value></property>
<property><name>metric.jobconfdata.job_conf.mapred.reducer.class</name><value>mr_reducer_cls</value></property>
<property><name>metric.jobconfdata.job_conf.mapred.mapper.class</name><value>mr_mapper_cls</value></property>
<!-- SALSA mapreduce fsm additions -->
<property><name>report.db.name.mapreduce_fsm</name><value>mapreduce_fsm</value></property>
<property><name>report.db.primary.key.mapreduce_fsm</name><value>unique_id</value></property>
<property><name>consolidator.table.mapreduce_fsm</name><value>5,30,180,720</value></property>
<property><name>metric.mapreduce_fsm.job_id</name><value>job_id</value></property>
<property><name>metric.mapreduce_fsm.state_uniq_id</name><value>unique_id</value></property>
<property><name>metric.mapreduce_fsm.task_id</name><value>friendly_id</value></property>
<property><name>metric.mapreduce_fsm.state_name</name><value>state_name</value></property>
<property><name>metric.mapreduce_fsm.host</name><value>hostname</value></property>
<property><name>metric.mapreduce_fsm.host_other</name><value>other_host</value> </property>
<property><name>metric.mapreduce_fsm.time_start</name><value>start_time</value></property>
<property><name>metric.mapreduce_fsm.time_end</name><value>finish_time</value></property>
<property><name>metric.mapreduce_fsm.time_start_millis</name><value>start_time_millis</value></property>
<property><name>metric.mapreduce_fsm.time_end_millis</name><value>finish_time_millis</value></property>
<property><name>metric.mapreduce_fsm.state_string</name><value>status</value></property>
<property><name>metric.mapreduce_fsm.counter_file_bytes_read</name><value>file_bytes_read</value></property>
<property><name>metric.mapreduce_fsm.counter_file_bytes_written</name><value>file_bytes_written</value></property>
<property><name>metric.mapreduce_fsm.counter_combine_input_records</name><value>combine_input_records</value></property>
<property><name>metric.mapreduce_fsm.counter_combine_output_records</name><value>combine_output_records</value></property>
<property><name>metric.mapreduce_fsm.counter_input_records</name><value>input_records</value></property>
<property><name>metric.mapreduce_fsm.counter_output_records</name><value>output_records</value></property>
<property><name>metric.mapreduce_fsm.counter_input_bytes</name><value>input_bytes</value></property>
<property><name>metric.mapreduce_fsm.counter_output_bytes</name><value>output_bytes</value></property>
<property><name>metric.mapreduce_fsm.counter_input_groups</name><value>input_groups</value></property>
<property><name>metric.mapreduce_fsm.counter_spilled_records</name><value>spilled_records</value></property>
<!-- for user daily aggregation -->
<property><name>report.db.name.userdailysummary</name><value>user_job_summary</value></property>
<property><name>consolidator.table.user_job_summary</name><value>5,30,180,720</value></property>
<!-- SALSA filesystem fsm additions -->
<property><name>report.db.name.filesystem_fsm</name><value>filesystem_fsm</value></property>
<property><name>report.db.primary.key.filesystem_fsm</name><value>unique_id</value></property>
<property><name>consolidator.table.filesystem_fsm</name><value>5,30,180,720</value></property>
<property><name>metric.filesystem_fsm.job_id</name><value>client_id</value></property>
<property><name>metric.filesystem_fsm.state_uniq_id</name><value>unique_id</value></property>
<property><name>metric.filesystem_fsm.task_id</name><value>block_id</value></property>
<property><name>metric.filesystem_fsm.state_name</name><value>state_name</value></property>
<property><name>metric.filesystem_fsm.host</name><value>hostname</value></property>
<property><name>metric.filesystem_fsm.host_other</name><value>other_host</value> </property>
<property><name>metric.filesystem_fsm.time_start</name><value>start_time</value></property>
<property><name>metric.filesystem_fsm.time_end</name><value>finish_time</value></property>
<property><name>metric.filesystem_fsm.time_start_millis</name><value>start_time_millis</value></property>
<property><name>metric.filesystem_fsm.time_end_millis</name><value>finish_time_millis</value></property>
<property><name>metric.filesystem_fsm.state_string</name><value>status</value></property>
<property><name>metric.filesystem_fsm.counter_bytes</name><value>bytes</value></property>
</configuration>