# blob: 0951114470faa7499f4a8162fe6de1cc25c99abe [file] [log] [blame]
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Set everything to be logged to the file core/target/unit-tests.log
rootLogger.level = info
rootLogger.appenderRef.stdout.ref = STDOUT
rootLogger.appenderRef.file.ref = File
# Console Appender (STDOUT): warn level and above only (see threshold filter below)
appender.console.type = Console
appender.console.name = STDOUT
appender.console.target = SYSTEM_OUT
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{HH:mm:ss.SSS} %p %c: %maxLen{%m}{512}%n%ex{8}%n
appender.console.filter.threshold.type = ThresholdFilter
appender.console.filter.threshold.level = warn
# Structured (JSON) file appender used by LogQuerySuite
appender.structured.type = File
appender.structured.name = structured
appender.structured.fileName = target/LogQuerySuite.log
appender.structured.layout.type = JsonTemplateLayout
appender.structured.layout.eventTemplateUri = classpath:org/apache/spark/SparkLayout.json
# File Appender: full info-level output to target/unit-tests.log
appender.file.type = File
appender.file.name = File
appender.file.fileName = target/unit-tests.log
appender.file.layout.type = PatternLayout
appender.file.layout.pattern = %d{HH:mm:ss.SSS} %t %p %c{1}: %m%n%ex
appender.file.filter.threshold.type = ThresholdFilter
appender.file.filter.threshold.level = info
# Some packages are noisy for no good reason.
logger.parquet_recordreader.name = org.apache.parquet.hadoop.ParquetRecordReader
logger.parquet_recordreader.additivity = false
logger.parquet_recordreader.level = off
# SPARK-44922: Disable o.a.p.h.InternalParquetRecordWriter logs for tests
logger.parquet_recordwriter.name = org.apache.parquet.hadoop.InternalParquetRecordWriter
logger.parquet_recordwriter.additivity = false
logger.parquet_recordwriter.level = off
logger.parquet_outputcommitter.name = org.apache.parquet.hadoop.ParquetOutputCommitter
logger.parquet_outputcommitter.additivity = false
logger.parquet_outputcommitter.level = off
logger.hadoop_lazystruct.name = org.apache.hadoop.hive.serde2.lazy.LazyStruct
logger.hadoop_lazystruct.additivity = false
logger.hadoop_lazystruct.level = off
logger.hadoop_retryinghmshandler.name = org.apache.hadoop.hive.metastore.RetryingHMSHandler
logger.hadoop_retryinghmshandler.additivity = false
logger.hadoop_retryinghmshandler.level = off
logger.hive_metadata.name = hive.ql.metadata.Hive
logger.hive_metadata.additivity = false
logger.hive_metadata.level = off
# Parquet related logging
logger.parquet1.name = org.apache.parquet.CorruptStatistics
logger.parquet1.level = error
logger.parquet2.name = parquet.CorruptStatistics
logger.parquet2.level = error
# Custom loggers
logger.structured.name = org.apache.spark.sql.LogQuerySuite
logger.structured.level = trace
logger.structured.appenderRefs = structured
logger.structured.appenderRef.structured.ref = structured
# Ignore messages below error level from HadoopFSUtils, because it's a bit verbose
logger.hadoopfsutils.name = org.apache.spark.util.HadoopFSUtils
logger.hadoopfsutils.level = error