# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Define some default values that can be overridden by system properties
hadoop.root.logger=INFO,console
hadoop.log.dir=.
hadoop.log.file=hadoop.log
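#
# These defaults are typically overridden with -D JVM system properties at
# daemon startup; for example (hypothetical values):
#   -Dhadoop.root.logger=DEBUG,console -Dhadoop.log.dir=/var/log/hadoop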
#
# Job Summary Appender
#
# Use the following logger to send the job summary to a separate file, defined
# by hadoop.mapreduce.jobsummary.log.file and rolled daily:
hadoop.mapreduce.jobsummary.logger=INFO,JSA
#
#hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
# Define the root logger to the system property "hadoop.root.logger".
log4j.rootLogger=${hadoop.root.logger}, EventCounter
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
# Debugging Pattern format
#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
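# As a sketch, DatePattern follows java.text.SimpleDateFormat, so the rollover
# interval can be changed, e.g. hourly or monthly instead of daily:
#log4j.appender.DRFA.DatePattern=.yyyy-MM-dd-HH
#log4j.appender.DRFA.DatePattern=.yyyy-MM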
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
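# Note: the console appender is already active via the default
# hadoop.root.logger=INFO,console above; to log to both the console and the
# daily rolling file, a setting along these lines could be used:
#hadoop.root.logger=INFO,console,DRFA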
#
# TaskLog Appender
#
#Default values
hadoop.tasklog.taskid=null
hadoop.tasklog.iscleanup=false
hadoop.tasklog.noKeepSplits=4
hadoop.tasklog.totalLogFileSize=100
hadoop.tasklog.purgeLogSplits=true
hadoop.tasklog.logsRetainHours=12
log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
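# The taskId, isCleanup and totalLogFileSize values above are normally supplied
# per task attempt by the task runner as JVM system properties, roughly:
#   -Dhadoop.tasklog.taskid=attempt_200901010000_0001_m_000000_0 \
#   -Dhadoop.tasklog.iscleanup=false -Dhadoop.tasklog.totalLogFileSize=100
# (the attempt id is a made-up example)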
#
#Security audit appender
#
hadoop.security.log.file=SecurityAuth.audit
log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
# Security logger (level OFF, so security audit events are disabled by default)
log4j.logger.SecurityLogger=OFF,console
log4j.logger.SecurityLogger.additivity=false
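# To actually capture security audit events, the SecurityLogger could be
# pointed at the DRFAS appender defined above instead of being switched OFF:
#log4j.logger.SecurityLogger=INFO,DRFAS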
#
# Rolling File Appender
#
#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
# Log file size and 30-day backups
#log4j.appender.RFA.MaxFileSize=1MB
#log4j.appender.RFA.MaxBackupIndex=30
#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
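# To switch to size-based rolling, uncomment the RFA lines above and reference
# the appender from the root logger, e.g.:
#hadoop.root.logger=INFO,RFA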
#
# FSNamesystem Audit logging
# All audit events are logged at INFO level; the WARN level set here is
# overridden by the Chukwa audit section later in this file (INFO,DRFAAUDIT).
#
log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=WARN
# Custom Logging levels
hadoop.metrics.log.level=INFO
#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
# Jets3t library
log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
#
# Null Appender
# Traps the security logger on the Hadoop client side
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
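# Sketch: any noisy logger can be discarded by routing it to the NullAppender
# and disabling additivity (com.example.noisy is a hypothetical logger name):
#log4j.logger.com.example.noisy=INFO,NullAppender
#log4j.additivity.com.example.noisy=false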
#
# Event Counter Appender
# Sends counts of logging messages at different severity levels to Hadoop Metrics.
#
log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
#
# Job Summary Appender
#
log4j.appender.JSA=org.apache.log4j.net.SocketAppender
log4j.appender.JSA.RemoteHost=localhost
log4j.appender.JSA.Port=9098
log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
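# Here JSA streams job summaries to a local socket (localhost:9098). As a
# sketch, the file-based variant from stock Hadoop rolls
# ${hadoop.mapreduce.jobsummary.log.file} daily instead:
#log4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender
#log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
#log4j.appender.JSA.DatePattern=.yyyy-MM-dd
#log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
#log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n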
#
# AUDIT LOGGING - All audit events are logged at INFO level
#
# CHUKWA AUDIT LOG
log4j.appender.DRFAAUDIT=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
log4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/audit.log
log4j.appender.DRFAAUDIT.recordType=HadoopLog
log4j.appender.DRFAAUDIT.chukwaClientHostname=localhost
log4j.appender.DRFAAUDIT.chukwaClientPortNum=9093
log4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd
log4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout
log4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=INFO,DRFAAUDIT
log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
# ClientTrace (Shuffle bytes)
log4j.appender.MR_CLIENTTRACE=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
log4j.appender.MR_CLIENTTRACE.File=${hadoop.log.dir}/mr_clienttrace.log
log4j.appender.MR_CLIENTTRACE.recordType=ClientTrace
log4j.appender.MR_CLIENTTRACE.chukwaClientHostname=localhost
log4j.appender.MR_CLIENTTRACE.chukwaClientPortNum=9093
log4j.appender.MR_CLIENTTRACE.DatePattern=.yyyy-MM-dd
log4j.appender.MR_CLIENTTRACE.layout=org.apache.log4j.PatternLayout
log4j.appender.MR_CLIENTTRACE.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.logger.org.apache.hadoop.mapred.TaskTracker.clienttrace=INFO,MR_CLIENTTRACE
log4j.additivity.org.apache.hadoop.mapred.TaskTracker.clienttrace=false
# ClientTrace (HDFS bytes)
log4j.appender.HDFS_CLIENTTRACE=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
log4j.appender.HDFS_CLIENTTRACE.File=${hadoop.log.dir}/hdfs_clienttrace.log
log4j.appender.HDFS_CLIENTTRACE.recordType=ClientTrace
log4j.appender.HDFS_CLIENTTRACE.chukwaClientHostname=localhost
log4j.appender.HDFS_CLIENTTRACE.chukwaClientPortNum=9093
log4j.appender.HDFS_CLIENTTRACE.DatePattern=.yyyy-MM-dd
log4j.appender.HDFS_CLIENTTRACE.layout=org.apache.log4j.PatternLayout
log4j.appender.HDFS_CLIENTTRACE.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.logger.org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace=INFO,HDFS_CLIENTTRACE
log4j.additivity.org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace=false