#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This is the template for Airflow's unit test configuration. When Airflow runs
# unit tests, it looks for a configuration file at $AIRFLOW_HOME/unittests.cfg.
# If it doesn't exist, Airflow uses this template to generate it by replacing
# variables in curly braces with their global values from configuration.py.
# Users should not modify this file; they should customize the generated
# unittests.cfg instead.
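#
# For example, with AIRFLOW_HOME set to /home/user/airflow (an assumed value,
# for illustration only), the line
#   sql_alchemy_conn = sqlite:///{AIRFLOW_HOME}/unittests.db
# would be rendered in the generated unittests.cfg as
#   sql_alchemy_conn = sqlite:////home/user/airflow/unittests.db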
# ----------------------- TEMPLATE BEGINS HERE -----------------------
[core]
unit_test_mode = True
dags_folder = {TEST_DAGS_FOLDER}
plugins_folder = {TEST_PLUGINS_FOLDER}
executor = SequentialExecutor
sql_alchemy_conn = sqlite:///{AIRFLOW_HOME}/unittests.db
load_examples = True
load_default_connections = True
donot_pickle = True
dag_concurrency = 16
dags_are_paused_at_creation = False
fernet_key = {FERNET_KEY}
enable_xcom_pickling = False
killed_task_cleanup_time = 5
hostname_callable = socket.getfqdn
default_task_retries = 0
# This is a hack: too many tests assume DAGs are already in the DB. We should fix those tests instead.
store_serialized_dags = False
[logging]
base_log_folder = {AIRFLOW_HOME}/logs
logging_level = INFO
fab_logging_level = WARN
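# Note: the quadrupled braces below are escapes. Each {{{{ collapses to {{
# when the template is rendered with format-style substitution, leaving a
# literal Jinja expression such as {{ ti.dag_id }} in the generated
# unittests.cfg, which Airflow evaluates per task instance.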
log_filename_template = {{{{ ti.dag_id }}}}/{{{{ ti.task_id }}}}/{{{{ ts }}}}/{{{{ try_number }}}}.log
log_processor_filename_template = {{{{ filename }}}}.log
dag_processor_manager_log_location = {AIRFLOW_HOME}/logs/dag_processor_manager/dag_processor_manager.log
[cli]
api_client = airflow.api.client.local_client
endpoint_url = http://localhost:8080
[api]
auth_backend = airflow.api.auth.backend.default
[operators]
default_owner = airflow
[hive]
default_hive_mapred_queue = airflow
[webserver]
base_url = http://localhost:8080
web_server_host = 0.0.0.0
web_server_port = 8080
dag_orientation = LR
dag_default_view = tree
log_fetch_timeout_sec = 5
hide_paused_dags_by_default = False
page_size = 100
[email]
email_backend = airflow.utils.email.send_email_smtp
[smtp]
smtp_host = localhost
smtp_user = airflow
smtp_port = 25
smtp_password = airflow
smtp_mail_from = airflow@example.com
smtp_retry_limit = 5
smtp_timeout = 30
[celery]
celery_app_name = airflow.executors.celery_executor
worker_concurrency = 16
worker_log_server_port = 8793
broker_url = sqla+mysql://airflow:airflow@localhost:3306/airflow
result_backend = db+mysql://airflow:airflow@localhost:3306/airflow
flower_host = 0.0.0.0
flower_port = 5555
default_queue = default
sync_parallelism = 0
worker_precheck = False
[scheduler]
job_heartbeat_sec = 1
schedule_after_task_execution = False
scheduler_heartbeat_sec = 5
scheduler_health_check_threshold = 30
parsing_processes = 2
catchup_by_default = True
scheduler_zombie_task_threshold = 300
dag_dir_list_interval = 0
max_tis_per_query = 512
[admin]
hide_sensitive_variable_fields = True
sensitive_variable_fields =
[elasticsearch]
host =
log_id_template = {{dag_id}}-{{task_id}}-{{execution_date}}-{{try_number}}
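# The doubled braces above collapse to single braces when the template is
# rendered, so the generated unittests.cfg contains placeholders such as
# {dag_id} that Airflow fills in per task instance when building the log id.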
end_of_log_mark = end_of_log
[elasticsearch_configs]
use_ssl = False
verify_certs = True
[kubernetes]
dags_volume_claim = default