<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Writing Logs &mdash; Airflow Documentation</title>
<link rel="stylesheet" href="../_static/css/theme.css" type="text/css" />
<link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
<link rel="index" title="Index" href="../genindex.html" />
<link rel="search" title="Search" href="../search.html" />
<link rel="next" title="Celery Executor" href="executor/use-celery.html" />
<link rel="prev" title="Securing Connections" href="secure-connections.html" />
</head>
<body>
<div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
<div itemprop="articleBody">
<div class="section" id="writing-logs">
<h1>Writing Logs<a class="headerlink" href="#writing-logs" title="Permalink to this headline"></a></h1>
<div class="section" id="writing-logs-locally">
<h2>Writing Logs Locally<a class="headerlink" href="#writing-logs-locally" title="Permalink to this headline"></a></h2>
<p>Users can specify the directory to place log files in <code class="docutils literal notranslate"><span class="pre">airflow.cfg</span></code> using
<code class="docutils literal notranslate"><span class="pre">base_log_folder</span></code>. By default, logs are placed in the <code class="docutils literal notranslate"><span class="pre">AIRFLOW_HOME</span></code>
directory.</p>
<p>The following convention is followed while naming logs: <code class="docutils literal notranslate"><span class="pre">{dag_id}/{task_id}/{execution_date}/{try_number}.log</span></code></p>
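<p>For example, the first try of task <code>run_this_last</code> in the DAG <code>example_bash_operator</code>
(the same names used in the sample log output further down this page) would be written under a path
like the one below; this is a plain-Python illustration of how the template expands, not Airflow code:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre># Illustration only: expand the default log-path template by hand.
template = '{dag_id}/{task_id}/{execution_date}/{try_number}.log'
print(template.format(
    dag_id='example_bash_operator',
    task_id='run_this_last',
    execution_date='2017-10-03T00:00:00',
    try_number=1,
))
# example_bash_operator/run_this_last/2017-10-03T00:00:00/1.log
</pre></div>
</div>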
<p>In addition, users can supply a remote location to store current logs and backups.</p>
<p>In the Airflow web UI, local logs take precedence over remote logs. If local logs
cannot be found or accessed, the remote logs are displayed. Note that logs
are only sent to remote storage once a task is complete (including on failure); in other words, remote logs for
running tasks are unavailable.</p>
<div class="section" id="before-you-begin">
<h3>Before you begin</h3>
<p>Remote logging uses an existing Airflow connection to read or write logs. If you
don’t have a connection properly set up, this process will fail.</p>
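<p>A connection can be created in the web UI, or registered from a short script. The
following is a minimal sketch for the S3 case described below, assuming Airflow 1.10 with
an initialized metadata database; the connection id and credential values are placeholders:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre># Hedged sketch: register a connection for remote logging programmatically.
# 'MyS3Conn' and the credential values are placeholders.
from airflow import settings
from airflow.models import Connection

conn = Connection(
    conn_id='MyS3Conn',
    conn_type='aws',
    extra='{"aws_access_key_id": "YOUR_KEY", "aws_secret_access_key": "YOUR_SECRET"}',
)
session = settings.Session()
session.add(conn)
session.commit()
</pre></div>
</div>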
</div>
</div>
<div class="section" id="writing-logs-to-amazon-s3">
<span id="write-logs-amazon"></span><h2>Writing Logs to Amazon S3<a class="headerlink" href="#writing-logs-to-amazon-s3" title="Permalink to this headline"></a></h2>
<div class="section" id="enabling-remote-logging">
<h3>Enabling remote logging</h3>
<p>To enable this feature, <code>airflow.cfg</code> must be configured as follows:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre>[core]
# Airflow can store logs remotely in AWS S3. Users must supply a remote
# location URL (starting with 's3://') and an Airflow connection
# id that provides access to the storage location.
remote_logging = True
remote_base_log_folder = s3://my-bucket/path/to/logs
remote_log_conn_id = MyS3Conn
# Use server-side encryption for logs stored in S3
encrypt_s3_logs = False
</pre></div>
</div>
<p>In the above example, Airflow will try to use <code>S3Hook('MyS3Conn')</code>.</p>
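<p>To confirm the connection works before depending on it for logs, the hook can be
exercised directly. A minimal sketch, assuming the bucket name from the example above:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre># Hedged sketch: check that the logging connection can reach the bucket.
from airflow.hooks.S3_hook import S3Hook

hook = S3Hook('MyS3Conn')
print(hook.check_for_bucket('my-bucket'))  # True if the bucket is reachable
</pre></div>
</div>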
</div>
</div>
<div class="section" id="writing-logs-to-azure-blob-storage">
<span id="write-logs-azure"></span><h2>Writing Logs to Azure Blob Storage<a class="headerlink" href="#writing-logs-to-azure-blob-storage" title="Permalink to this headline"></a></h2>
<p>Airflow can be configured to read and write task logs in Azure Blob Storage.</p>
<p>Follow the steps below to enable Azure Blob Storage logging:</p>
<ol class="arabic">
<li><p>Airflow’s logging system requires a custom <cite>.py</cite> file to be located on the <code>PYTHONPATH</code>, so that it is importable from Airflow. Start by creating a directory to store the config file; <code>$AIRFLOW_HOME/config</code> is recommended.</p></li>
<li><p>Create empty files called <code>$AIRFLOW_HOME/config/log_config.py</code> and <code>$AIRFLOW_HOME/config/__init__.py</code>.</p></li>
<li><p>Copy the contents of <code>airflow/config_templates/airflow_local_settings.py</code> into the <code>log_config.py</code> file created in <cite>Step 2</cite>.</p></li>
<li><p>Customize the following portions of the template (see the sketch after this list):</p>
<blockquote>
<div><div class="highlight-bash notranslate"><div class="highlight"><pre># wasb buckets should start with "wasb" just to help Airflow select the correct handler
REMOTE_BASE_LOG_FOLDER = 'wasb-&lt;whatever you want here&gt;'
# Rename DEFAULT_LOGGING_CONFIG to LOGGING_CONFIG
LOGGING_CONFIG = ...
</pre></div>
</div>
</div></blockquote>
</li>
<li><p>Make sure an Azure Blob Storage (Wasb) connection hook has been defined in Airflow. The hook should have read and write access to the Azure Blob Storage container defined above in <code>REMOTE_BASE_LOG_FOLDER</code>.</p></li>
<li><p>Update <code>$AIRFLOW_HOME/airflow.cfg</code> to contain:</p>
<blockquote>
<div><div class="highlight-bash notranslate"><div class="highlight"><pre>remote_logging = True
logging_config_class = log_config.LOGGING_CONFIG
remote_log_conn_id = &lt;name of the Azure Blob Storage connection&gt;
</pre></div>
</div>
</div></blockquote>
</li>
</li>
<li><p>Restart the Airflow webserver and scheduler, and trigger (or wait for) a new task execution.</p></li>
<li><p>Verify that logs are showing up for newly executed tasks in the bucket you’ve defined.</p></li>
</ol>
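<p>As a rough sketch of <cite>Step 4</cite>, the customized <code>log_config.py</code> could look
like the following. This is a variant that deep-copies the shipped template instead of renaming
the pasted dictionary by hand; the container name is a placeholder:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre># log_config.py -- hedged sketch of the customizations described above.
# Instead of copying the whole template and renaming DEFAULT_LOGGING_CONFIG,
# this variant deep-copies it and overrides the relevant settings.
import copy

from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG

# 'wasb-airflow-logs' is a hypothetical container name; the 'wasb-' prefix
# helps Airflow select the wasb handler.
REMOTE_BASE_LOG_FOLDER = 'wasb-airflow-logs'

LOGGING_CONFIG = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
</pre></div>
</div>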
</div>
<div class="section" id="writing-logs-to-google-cloud-storage">
<span id="write-logs-gcp"></span><h2>Writing Logs to Google Cloud Storage<a class="headerlink" href="#writing-logs-to-google-cloud-storage" title="Permalink to this headline"></a></h2>
<p>Follow the steps below to enable Google Cloud Storage logging.</p>
<p>To enable this feature, <code>airflow.cfg</code> must be configured as in this
example:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre>[core]
# Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elasticsearch.
# Users must supply an Airflow connection id that provides access to the storage
# location. If remote_logging is set to true, see UPDATING.md for additional
# configuration requirements.
remote_logging = True
remote_base_log_folder = gs://my-bucket/path/to/logs
remote_log_conn_id = MyGCSConn
</pre></div>
</div>
<ol class="arabic simple">
<li><p>Install the <code>gcp</code> package first, like so: <code>pip install 'apache-airflow[gcp]'</code>.</p></li>
<li><p>Make sure a Google Cloud Platform connection hook has been defined in Airflow. The hook should have read and write access to the Google Cloud Storage bucket defined above in <code>remote_base_log_folder</code>.</p></li>
<li><p>Restart the Airflow webserver and scheduler, and trigger (or wait for) a new task execution.</p></li>
<li><p>Verify that logs are showing up for newly executed tasks in the bucket you’ve defined.</p></li>
<li><p>Verify that the Google Cloud Storage viewer is working in the UI. Pull up a newly executed task, and verify that you see something like:</p></li>
</ol>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>*** Reading remote log from gs://&lt;bucket where logs should be persisted&gt;/example_bash_operator/run_this_last/2017-10-03T00:00:00/16.log.
<span class="o">[</span><span class="m">2017</span>-10-03 <span class="m">21</span>:57:50,056<span class="o">]</span> <span class="o">{</span>cli.py:377<span class="o">}</span> INFO - Running on host chrisr-00532
<span class="o">[</span><span class="m">2017</span>-10-03 <span class="m">21</span>:57:50,093<span class="o">]</span> <span class="o">{</span>base_task_runner.py:115<span class="o">}</span> INFO - Running: <span class="o">[</span><span class="s1">&#39;bash&#39;</span>, <span class="s1">&#39;-c&#39;</span>, u<span class="s1">&#39;airflow run example_bash_operator run_this_last 2017-10-03T00:00:00 --job_id 47 --raw -sd DAGS_FOLDER/example_dags/example_bash_operator.py&#39;</span><span class="o">]</span>
<span class="o">[</span><span class="m">2017</span>-10-03 <span class="m">21</span>:57:51,264<span class="o">]</span> <span class="o">{</span>base_task_runner.py:98<span class="o">}</span> INFO - Subtask: <span class="o">[</span><span class="m">2017</span>-10-03 <span class="m">21</span>:57:51,263<span class="o">]</span> <span class="o">{</span>__init__.py:45<span class="o">}</span> INFO - Using executor SequentialExecutor
<span class="o">[</span><span class="m">2017</span>-10-03 <span class="m">21</span>:57:51,306<span class="o">]</span> <span class="o">{</span>base_task_runner.py:98<span class="o">}</span> INFO - Subtask: <span class="o">[</span><span class="m">2017</span>-10-03 <span class="m">21</span>:57:51,306<span class="o">]</span> <span class="o">{</span>models.py:186<span class="o">}</span> INFO - Filling up the DagBag from /airflow/dags/example_dags/example_bash_operator.py
</pre></div>
</div>
<p><strong>Note</strong> that the path to the remote log file is listed on the first line.</p>
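<p>As with S3, the connection can be sanity-checked from Python before task logs depend on
it. A minimal sketch, assuming the <code>gcp</code> extras are installed; the bucket and
object names are placeholders:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre># Hedged sketch: confirm that MyGCSConn can see the log bucket.
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook

hook = GoogleCloudStorageHook(google_cloud_storage_conn_id='MyGCSConn')
print(hook.exists('my-bucket', 'path/to/logs/placeholder.log'))
</pre></div>
</div>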
</div>
<div class="section" id="writing-logs-to-elasticsearch">
<span id="write-logs-elasticsearch"></span><h2>Writing Logs to Elasticsearch<a class="headerlink" href="#writing-logs-to-elasticsearch" title="Permalink to this headline"></a></h2>
<p>Airflow can be configured to read task logs from Elasticsearch and optionally write logs to stdout in standard or JSON format. These logs can later be collected and forwarded to the Elasticsearch cluster using tools like fluentd, logstash or others.</p>
<p>You can choose to have all task logs from workers output to the highest parent-level process, instead of the standard file locations. This allows for some additional flexibility in container environments like Kubernetes, where container stdout is already being logged to the host nodes. From there a log-shipping tool can be used to forward them along to Elasticsearch. To use this feature, set the <code>write_stdout</code> option in <code>airflow.cfg</code>.</p>
<p>You can also choose to have the logs output in JSON format, using the <code>json_format</code> option. Airflow uses the standard Python logging module, and JSON fields are extracted directly from the LogRecord object. To choose the fields, set the <code>json_fields</code> option in <code>airflow.cfg</code> to a comma-delimited string of the fields you want collected in the logs. These fields come from the LogRecord object in the <code>logging</code> module; <a class="reference external" href="https://docs.python.org/3/library/logging.html#logrecord-objects">documentation on the available attributes can be found here</a>.</p>
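<p>The following standalone sketch illustrates the shape of those JSON records. It is not
Airflow’s actual handler, just the standard <code>logging</code> module with a formatter that
serializes the same LogRecord fields used in the <code>json_fields</code> example below:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre># Illustration only: emit LogRecord fields as JSON, roughly what
# json_format = True with the json_fields below would produce.
import json
import logging

class JsonFormatter(logging.Formatter):
    fields = ['asctime', 'filename', 'lineno', 'levelname', 'message']

    def format(self, record):
        # 'message' and 'asctime' only exist after these helpers run.
        record.message = record.getMessage()
        record.asctime = self.formatTime(record)
        return json.dumps({f: getattr(record, f) for f in self.fields})

handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter())
logger = logging.getLogger('demo')
logger.addHandler(handler)
logger.warning('task started')
# {"asctime": "...", "filename": "...", "lineno": ..., "levelname": "WARNING", "message": "task started"}
</pre></div>
</div>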
<p>First, to use the handler, <code>airflow.cfg</code> must be configured as follows:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre>[core]
# Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elasticsearch.
# Users must supply an Airflow connection id that provides access to the storage
# location. If remote_logging is set to true, see UPDATING.md for additional
# configuration requirements.
remote_logging = True

[elasticsearch]
log_id_template = {{dag_id}}-{{task_id}}-{{execution_date}}-{{try_number}}
end_of_log_mark = end_of_log
write_stdout =
json_fields =
</pre></div>
</div>
<p>To output task logs to stdout in JSON format, the following config could be used:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">[</span>core<span class="o">]</span>
<span class="c1"># Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search.</span>
<span class="c1"># Users must supply an Airflow connection id that provides access to the storage</span>
<span class="c1"># location. If remote_logging is set to true, see UPDATING.md for additional</span>
<span class="c1"># configuration requirements.</span>
<span class="nv">remote_logging</span> <span class="o">=</span> True
<span class="o">[</span>elasticsearch<span class="o">]</span>
<span class="nv">log_id_template</span> <span class="o">=</span> <span class="o">{{</span>dag_id<span class="o">}}</span>-<span class="o">{{</span>task_id<span class="o">}}</span>-<span class="o">{{</span>execution_date<span class="o">}}</span>-<span class="o">{{</span>try_number<span class="o">}}</span>
<span class="nv">end_of_log_mark</span> <span class="o">=</span> end_of_log
<span class="nv">write_stdout</span> <span class="o">=</span> True
<span class="nv">json_format</span> <span class="o">=</span> True
<span class="nv">json_fields</span> <span class="o">=</span> asctime, filename, lineno, levelname, message
</pre></div>
</div>
</div>
<div class="section" id="writing-logs-to-elasticsearch-over-tls">
<span id="write-logs-elasticsearch-tls"></span><h2>Writing Logs to Elasticsearch over TLS<a class="headerlink" href="#writing-logs-to-elasticsearch-over-tls" title="Permalink to this headline"></a></h2>
<p>To add custom configurations to ElasticSearch (e.g. turning on ssl_verify, adding a custom self-signed cert, etc.) use the <cite>elasticsearch_configs</cite> setting in your airfow.cfg</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="o">[</span>core<span class="o">]</span>
<span class="c1"># Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search.</span>
<span class="c1"># Users must supply an Airflow connection id that provides access to the storage</span>
<span class="c1"># location. If remote_logging is set to true, see UPDATING.md for additional</span>
<span class="c1"># configuration requirements.</span>
<span class="nv">remote_logging</span> <span class="o">=</span> True
<span class="o">[</span>elasticsearch_configs<span class="o">]</span>
<span class="nv">use_ssl</span><span class="o">=</span>True
<span class="nv">verify_certs</span><span class="o">=</span>True
<span class="nv">ca_certs</span><span class="o">=</span>/path/to/CA_certs
</pre></div>
</div>
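<p>These options are passed through to the Elasticsearch client used to read task logs.
Roughly, they correspond to the following client configuration (a hedged sketch, assuming
the <code>elasticsearch</code> Python package; the host is a placeholder):</p>
<div class="highlight-python notranslate"><div class="highlight"><pre># Hedged sketch: the elasticsearch_configs options map onto keyword
# arguments of the elasticsearch-py client, roughly like this.
from elasticsearch import Elasticsearch

es = Elasticsearch(
    ['https://es-host:9200'],  # hypothetical Elasticsearch endpoint
    use_ssl=True,
    verify_certs=True,
    ca_certs='/path/to/CA_certs',
)
</pre></div>
</div>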
</div>
</div>
</div>
</div>
</body>
</html>