#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This is the template for Airflow's unit test configuration. When Airflow runs
# unit tests, it looks for a configuration file at $AIRFLOW_HOME/unittests.cfg.
# If it doesn't exist, Airflow uses this template to generate it by replacing
# variables in curly braces with their global values from configuration.py.
# Users should not modify this file; they should customize the generated
# unittests.cfg instead.
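#
# For illustration (assuming AIRFLOW_HOME=/home/user/airflow), a template line
# such as
#     sql_alchemy_conn = sqlite:///{AIRFLOW_HOME}/unittests.db
# would appear in the generated unittests.cfg as
#     sql_alchemy_conn = sqlite:////home/user/airflow/unittests.db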
# ----------------------- TEMPLATE BEGINS HERE -----------------------
[core]
unit_test_mode = True
dags_folder = {TEST_DAGS_FOLDER}
plugins_folder = {TEST_PLUGINS_FOLDER}
dags_are_paused_at_creation = False
fernet_key = {FERNET_KEY}
killed_task_cleanup_time = 5
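# Each entry below is a regular expression matching class paths that may be
# imported during deserialization.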
allowed_deserialization_classes = airflow\..*
                                  tests\..*

[database]
sql_alchemy_conn = sqlite:///{AIRFLOW_HOME}/unittests.db

[logging]
celery_logging_level = WARN

[api]
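# The "default" auth backend performs no authentication, leaving the API open
# for tests.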
auth_backends = airflow.api.auth.backend.default

[hive]
default_hive_mapred_queue = airflow

[smtp]
smtp_user = airflow
smtp_password = airflow

[celery]
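# These URLs assume a local MySQL instance with user, password and database
# all named "airflow", used for the Celery integration tests.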
broker_url = sqla+mysql://airflow:airflow@localhost:3306/airflow
result_backend = db+mysql://airflow:airflow@localhost:3306/airflow

[scheduler]
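# A short job heartbeat and a DAG directory list interval of 0 keep the
# scheduler loop fast in tests; the mini-scheduler run after each task
# execution is disabled.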
job_heartbeat_sec = 1
schedule_after_task_execution = False
scheduler_health_check_server_port = 8794
dag_dir_list_interval = 0

[elasticsearch]
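# The doubled braces survive the curly-brace substitution described above, so
# the generated unittests.cfg contains literal placeholders such as {dag_id}.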
log_id_template = {{dag_id}}-{{task_id}}-{{run_id}}-{{map_index}}-{{try_number}}