#-------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#-------------------------------------------------------------
# Define some default values that can be overridden by system properties
hadoop.root.logger=INFO,console
hadoop.log.dir=.
hadoop.log.file=hadoop.log
hadoop.security.logger=OFF
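#
# Illustrative example only (not part of the shipped defaults): these values
# are normally overridden per daemon with JVM system properties at startup,
# e.g.
#   -Dhadoop.root.logger=DEBUG,console -Dhadoop.log.dir=/var/log/hadoop
# (the directory shown is a placeholder)
#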
# Security appender
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}
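# Rolling limits referenced by the security appender above; these keys are not
# set elsewhere in this file, so the values below are assumed defaults
# (matching the audit appenders further down) and can be overridden by system
# properties.
hadoop.security.log.maxfilesize=256MB
hadoop.security.log.maxbackupindex=20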
#
# Job Summary Appender
#
# Use the following logger to send the job summary to a separate file, defined
# by hadoop.mapreduce.jobsummary.log.file and rolled daily:
# hadoop.mapreduce.jobsummary.logger=INFO,JSA
#
hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
# Define the root logger from the system property "hadoop.root.logger".
log4j.rootLogger=${hadoop.root.logger}, EventCounter
# Logging Threshold
log4j.threshold=ALL
#
# Guardium Proxy setup - HDFS, MapReduce and Hadoop RPC
#
log4j.appender.GuardiumProxyAppender=org.apache.log4j.net.SocketAppender
log4j.appender.GuardiumProxyAppender.RemoteHost=
log4j.appender.GuardiumProxyAppender.Port=
log4j.appender.GuardiumProxyAppender.RecoveryFile=audit-${hadoop.log.file}
log4j.appender.GuardiumProxyAppender.Threshold=INFO
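#
# Illustrative example only (host and port are placeholders; the blank values
# above are left for the deployment to fill in): to stream audit events to a
# Guardium proxy, set the connection details and point the audit loggers at
# this appender, e.g.
#   log4j.appender.GuardiumProxyAppender.RemoteHost=guardium-proxy.example.com
#   log4j.appender.GuardiumProxyAppender.Port=9999
#   hadoop.security.logger=INFO,GuardiumProxyAppender
#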
# Hdfs audit logs
log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hadoop.security.logger}
hdfs.audit.logger=INFO,NullAppender
hdfs.audit.log.maxfilesize=256MB
hdfs.audit.log.maxbackupindex=20
log4j.appender.RFAAUDIT=org.apache.log4j.RollingFileAppender
log4j.appender.RFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log
log4j.appender.RFAAUDIT.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
log4j.appender.RFAAUDIT.MaxFileSize=${hdfs.audit.log.maxfilesize}
log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex}
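#
# Illustrative example only: the HDFS audit logger above follows
# ${hadoop.security.logger}; to keep HDFS audit events in hdfs-audit.log
# instead, it could be pointed at the RFAAUDIT appender, e.g.
#   hdfs.audit.logger=INFO,RFAAUDIT
#   log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}
#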
# MapReduce audit logs
log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
log4j.logger.org.apache.hadoop.mapred.AuditLogger=${hadoop.security.logger}
mapred.audit.logger=INFO,NullAppender
mapred.audit.log.maxfilesize=256MB
mapred.audit.log.maxbackupindex=20
log4j.appender.MRAUDIT=org.apache.log4j.RollingFileAppender
log4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log
log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
log4j.appender.MRAUDIT.MaxFileSize=${mapred.audit.log.maxfilesize}
log4j.appender.MRAUDIT.MaxBackupIndex=${mapred.audit.log.maxbackupindex}
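#
# Illustrative example only: likewise, MapReduce audit events could be kept in
# mapred-audit.log by pointing the audit logger at the MRAUDIT appender, e.g.
#   mapred.audit.logger=INFO,MRAUDIT
#   log4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}
#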
# Hadoop RPC audit logs
log4j.additivity.SecurityLogger=false
log4j.logger.SecurityLogger=${hadoop.security.logger}
log4j.appender.hadoopaudit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.hadoopaudit.DatePattern='.'yyyy-MM-dd
log4j.appender.hadoopaudit.File=${hadoop.log.dir}/audit-${hadoop.log.file}
log4j.appender.hadoopaudit.Append=true
log4j.appender.hadoopaudit.layout=org.apache.log4j.PatternLayout
log4j.appender.hadoopaudit.layout.ConversionPattern=%d{ISO8601} %5p %c - %m%n
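#
# Illustrative example only: RPC security audit events are written to
# audit-${hadoop.log.file}, rolled daily, once the security logger is enabled,
# e.g.
#   hadoop.security.logger=INFO,hadoopaudit
#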
#
# Daily Rolling File Appender
#
#log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
#log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
# Rollover at midnight
#log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
#log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Pattern format: Date LogLevel LoggerName LogMessage
#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
# Debugging Pattern format
#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
#
# TaskLog Appender
#
#Default values
hadoop.tasklog.taskid=null
hadoop.tasklog.iscleanup=false
hadoop.tasklog.noKeepSplits=4
hadoop.tasklog.totalLogFileSize=100
hadoop.tasklog.purgeLogSplits=true
hadoop.tasklog.logsRetainHours=12
log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogSocketAppender
log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
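#
# Illustrative example only: the task log appender is normally activated per
# task attempt by the framework through system properties on the child JVM,
# e.g.
#   -Dhadoop.tasklog.taskid=attempt_200901020304_0001_m_000001_0
#   -Dhadoop.root.logger=INFO,TLA
# (the attempt id shown is a placeholder)
#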
#
#Security audit appender
#
hadoop.security.log.file=SecurityAuth.audit
log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
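#
# Illustrative example only: to capture SecurityLogger events in the daily
# rolling SecurityAuth.audit file, the security logger could be switched from
# OFF to this appender, e.g.
#   hadoop.security.logger=INFO,DRFAS
#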
#
# Rolling File Appender
#
#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
# Log file size and 30 backups
#log4j.appender.RFA.MaxFileSize=1MB
#log4j.appender.RFA.MaxBackupIndex=30
#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
#
# Rolling File Appender
#
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
# Log file size and number of backups
log4j.appender.RFA.MaxFileSize=10MB
log4j.appender.RFA.MaxBackupIndex=3
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
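#
# Illustrative example only: to send daemon logs to this rolling file instead
# of the console, the root logger can be overridden at startup, e.g.
#   -Dhadoop.root.logger=INFO,RFA
#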
#
#Logger for streaming Job Configuration
#
log4j.logger.org.apache.hadoop.mapred.JobTrackerConfLogStreaming=INFO,${SAJC}
log4j.additivity.org.apache.hadoop.mapred.JobTrackerConfLogStreaming=false
#
#Socket Appender for streaming Job Configuration
#
log4j.appender.job.conf=org.apache.log4j.net.SocketAppender
log4j.appender.job.conf.RemoteHost=localhost
log4j.appender.job.conf.Port=${JOBCONF_LOGGING_PORT}
log4j.appender.job.conf.layout=org.apache.log4j.PatternLayout
log4j.appender.job.conf.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.job.conf.ReconnectionDelay=120000
#
#Logger for streaming task attempt logs
#
log4j.logger.org.apache.hadoop.mapred.TaskLogSocketAppender=INFO,${SATA}
log4j.additivity.org.apache.hadoop.mapred.TaskLogSocketAppender=false
#
#Socket appender for streaming task attempt logs
#
log4j.appender.task.attempt.log=org.apache.log4j.net.SocketAppender
log4j.appender.task.attempt.log.RemoteHost=localhost
log4j.appender.task.attempt.log.Port=${TASKATTEMPT_LOGGING_PORT}
log4j.appender.task.attempt.log.layout=org.apache.log4j.PatternLayout
log4j.appender.task.attempt.log.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.task.attempt.log.ReconnectionDelay=120000
#
#Socket Appender for Streaming NameNode, SecondaryNameNode and JobTracker Logs
#
log4j.appender.socket.appender=org.apache.log4j.net.SocketAppender
log4j.appender.socket.appender.RemoteHost=localhost
log4j.appender.socket.appender.Port=${HADOOP_LOGGING_PORT}
log4j.appender.socket.appender.layout=org.apache.log4j.PatternLayout
log4j.appender.socket.appender.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.socket.appender.ReconnectionDelay=120000
#
#Logger for streaming Job History Logs
#
log4j.logger.JobHistoryLogs=INFO,${SAJH}
log4j.additivity.JobHistoryLogs=false
#
#Socket Appender for Job History Logs
#
log4j.appender.job.history.log=org.apache.log4j.net.SocketAppender
log4j.appender.job.history.log.RemoteHost=localhost
log4j.appender.job.history.log.Port=${JOBHISTORY_LOGGING_PORT}
log4j.appender.job.history.log.layout=org.apache.log4j.PatternLayout
log4j.appender.job.history.log.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.job.history.log.ReconnectionDelay=120000
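#
# Note: ${SAJC}, ${SATA}, ${SAJH} and the *_LOGGING_PORT variables used in the
# streaming logger and socket appender definitions above are not defined in
# this file; they are expected to be supplied as system properties at startup.
# Illustrative placeholders only, e.g.
#   -DSAJH=job.history.log -DJOBHISTORY_LOGGING_PORT=<port>
#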
# Custom Logging levels
hadoop.metrics.log.level=INFO
#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
# Jets3t library
log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
#
# Null Appender
# Trap security logger on the hadoop client side
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
#
# Event Counter Appender
# Sends counts of logging messages at different severity levels to Hadoop Metrics.
#
log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
#
# Job Summary Appender
#
log4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
log4j.appender.JSA.DatePattern=.yyyy-MM-dd
log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
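#
# Illustrative example only: to roll job summaries into their own daily file
# (as described in the Job Summary Appender note near the top), the summary
# logger can be overridden, e.g.
#   hadoop.mapreduce.jobsummary.logger=INFO,JSA
#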