# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Define some default values that can be overridden by system properties
hadoop.root.logger=INFO,console
hadoop.log.dir=.
hadoop.log.file=hadoop.log

# Define the root logger to the system property "hadoop.root.logger".
log4j.rootLogger=${hadoop.root.logger}, EventCounter

# Logging Threshold
log4j.threshold=ALL

# Null Appender
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender

#
# Rolling File Appender - cap space usage at 5GB.
#
hadoop.log.maxfilesize=256MB
hadoop.log.maxbackupindex=20
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}

log4j.appender.RFA.MaxFileSize=${hadoop.log.maxfilesize}
log4j.appender.RFA.MaxBackupIndex=${hadoop.log.maxbackupindex}

log4j.appender.RFA.layout=org.apache.log4j.PatternLayout

# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
# Debugging Pattern format
#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n


#
# Daily Rolling File Appender
#

log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}

# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd

log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout

# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
# Debugging Pattern format
#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n


#
# console
# Add "console" to rootLogger above if you want to use this
#

log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n

#
# TaskLog Appender
#
log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender

log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n

#
# HDFS block state change log from block manager
#
# Uncomment the following to log normal block state change
# messages from BlockManager in NameNode.
#log4j.logger.BlockStateChange=DEBUG

#
# Security appender
#
hadoop.security.logger=INFO,NullAppender
hadoop.security.log.maxfilesize=256MB
hadoop.security.log.maxbackupindex=20
log4j.category.SecurityLogger=${hadoop.security.logger}
hadoop.security.log.file=SecurityAuth-${user.name}.audit
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}

#
# Daily Rolling Security appender
#
log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd


# Custom Logging levels
# AWS SDK & S3A FileSystem
#log4j.logger.com.amazonaws=ERROR
log4j.logger.com.amazonaws.http.AmazonHttpClient=ERROR
#log4j.logger.org.apache.hadoop.fs.s3a.S3AFileSystem=WARN

#
# Event Counter Appender
# Sends counts of logging messages at different severity levels to Hadoop Metrics.
#
log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter


log4j.logger.org.apache.hadoop.ozone=DEBUG,OZONE,FILE

# Do not log into datanode logs. Remove this line to have single log.
log4j.additivity.org.apache.hadoop.ozone=false

# For development purposes, log both to console and log file.
log4j.appender.OZONE=org.apache.log4j.ConsoleAppender
log4j.appender.OZONE.Threshold=info
log4j.appender.OZONE.layout=org.apache.log4j.PatternLayout
log4j.appender.OZONE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
 %X{component} %X{function} %X{resource} %X{user} %X{request} - %m%n

# Real ozone logger that writes to ozone.log
log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
log4j.appender.FILE.File=${hadoop.log.dir}/ozone.log
log4j.appender.FILE.Threshold=debug
log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
 (%F:%L) %X{function} %X{resource} %X{user} %X{request} - \
 %m%n

# Log levels of third-party libraries
log4j.logger.org.apache.commons.beanutils=WARN

log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
log4j.logger.org.apache.ratis.conf.ConfUtils=WARN
log4j.logger.org.apache.hadoop.security.ShellBasedUnixGroupsMapping=ERROR
log4j.logger.org.apache.ratis.grpc.client.GrpcClientProtocolClient=WARN