Source code for airflow.contrib.operators.databricks_operator

# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
import six
import time

from airflow.exceptions import AirflowException
from airflow.contrib.hooks.databricks_hook import DatabricksHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults

XCOM_RUN_ID_KEY = 'run_id'
XCOM_RUN_PAGE_URL_KEY = 'run_page_url'
<span class="k">def</span> <span class="nf">_deep_string_coerce</span><span class="p">(</span><span class="n">content</span><span class="p">,</span> <span class="n">json_path</span><span class="o">=</span><span class="s1">&#39;json&#39;</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Coerces content or all values of content if it is a dict to a string. The</span>
<span class="sd"> function will throw if content contains non-string or non-numeric types.</span>
<span class="sd"> The reason why we have this function is because the ``self.json`` field must be a</span>
<span class="sd"> dict with only string values. This is because ``render_template`` will fail</span>
<span class="sd"> for numerical values.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">c</span> <span class="o">=</span> <span class="n">_deep_string_coerce</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">content</span><span class="p">,</span> <span class="n">six</span><span class="o">.</span><span class="n">string_types</span><span class="p">):</span>
<span class="k">return</span> <span class="n">content</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">content</span><span class="p">,</span> <span class="n">six</span><span class="o">.</span><span class="n">integer_types</span> <span class="o">+</span> <span class="p">(</span><span class="nb">float</span><span class="p">,)):</span>
<span class="c1"># Databricks can tolerate either numeric or string types in the API backend.</span>
<span class="k">return</span> <span class="nb">str</span><span class="p">(</span><span class="n">content</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">content</span><span class="p">,</span> <span class="p">(</span><span class="nb">list</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)):</span>
<span class="k">return</span> <span class="p">[</span><span class="n">c</span><span class="p">(</span><span class="n">e</span><span class="p">,</span> <span class="s1">&#39;</span><span class="si">{0}</span><span class="s1">[</span><span class="si">{1}</span><span class="s1">]&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">json_path</span><span class="p">,</span> <span class="n">i</span><span class="p">))</span> <span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">e</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">content</span><span class="p">)]</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">content</span><span class="p">,</span> <span class="nb">dict</span><span class="p">):</span>
<span class="k">return</span> <span class="p">{</span><span class="n">k</span><span class="p">:</span> <span class="n">c</span><span class="p">(</span><span class="n">v</span><span class="p">,</span> <span class="s1">&#39;</span><span class="si">{0}</span><span class="s1">[</span><span class="si">{1}</span><span class="s1">]&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">json_path</span><span class="p">,</span> <span class="n">k</span><span class="p">))</span>
<span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="nb">list</span><span class="p">(</span><span class="n">content</span><span class="o">.</span><span class="n">items</span><span class="p">())}</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">param_type</span> <span class="o">=</span> <span class="nb">type</span><span class="p">(</span><span class="n">content</span><span class="p">)</span>
<span class="n">msg</span> <span class="o">=</span> <span class="s1">&#39;Type </span><span class="si">{0}</span><span class="s1"> used for parameter </span><span class="si">{1}</span><span class="s1"> is not a number or a string&#39;</span> \
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">param_type</span><span class="p">,</span> <span class="n">json_path</span><span class="p">)</span>
<span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="n">msg</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_handle_databricks_operator_execution</span><span class="p">(</span><span class="n">operator</span><span class="p">,</span> <span class="n">hook</span><span class="p">,</span> <span class="n">log</span><span class="p">,</span> <span class="n">context</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Handles the Airflow + Databricks lifecycle logic for a Databricks operator</span>
<span class="sd"> :param operator: Databricks operator being handled</span>
<span class="sd"> :param context: Airflow context</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="n">operator</span><span class="o">.</span><span class="n">do_xcom_push</span><span class="p">:</span>
<span class="n">context</span><span class="p">[</span><span class="s1">&#39;ti&#39;</span><span class="p">]</span><span class="o">.</span><span class="n">xcom_push</span><span class="p">(</span><span class="n">key</span><span class="o">=</span><span class="n">XCOM_RUN_ID_KEY</span><span class="p">,</span> <span class="n">value</span><span class="o">=</span><span class="n">operator</span><span class="o">.</span><span class="n">run_id</span><span class="p">)</span>
<span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;Run submitted with run_id: </span><span class="si">%s</span><span class="s1">&#39;</span><span class="p">,</span> <span class="n">operator</span><span class="o">.</span><span class="n">run_id</span><span class="p">)</span>
<span class="n">run_page_url</span> <span class="o">=</span> <span class="n">hook</span><span class="o">.</span><span class="n">get_run_page_url</span><span class="p">(</span><span class="n">operator</span><span class="o">.</span><span class="n">run_id</span><span class="p">)</span>
<span class="k">if</span> <span class="n">operator</span><span class="o">.</span><span class="n">do_xcom_push</span><span class="p">:</span>
<span class="n">context</span><span class="p">[</span><span class="s1">&#39;ti&#39;</span><span class="p">]</span><span class="o">.</span><span class="n">xcom_push</span><span class="p">(</span><span class="n">key</span><span class="o">=</span><span class="n">XCOM_RUN_PAGE_URL_KEY</span><span class="p">,</span> <span class="n">value</span><span class="o">=</span><span class="n">run_page_url</span><span class="p">)</span>
<span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;View run status, Spark UI, and logs at </span><span class="si">%s</span><span class="s1">&#39;</span><span class="p">,</span> <span class="n">run_page_url</span><span class="p">)</span>
<span class="k">while</span> <span class="kc">True</span><span class="p">:</span>
<span class="n">run_state</span> <span class="o">=</span> <span class="n">hook</span><span class="o">.</span><span class="n">get_run_state</span><span class="p">(</span><span class="n">operator</span><span class="o">.</span><span class="n">run_id</span><span class="p">)</span>
<span class="k">if</span> <span class="n">run_state</span><span class="o">.</span><span class="n">is_terminal</span><span class="p">:</span>
<span class="k">if</span> <span class="n">run_state</span><span class="o">.</span><span class="n">is_successful</span><span class="p">:</span>
<span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;</span><span class="si">%s</span><span class="s1"> completed successfully.&#39;</span><span class="p">,</span> <span class="n">operator</span><span class="o">.</span><span class="n">task_id</span><span class="p">)</span>
<span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;View run status, Spark UI, and logs at </span><span class="si">%s</span><span class="s1">&#39;</span><span class="p">,</span> <span class="n">run_page_url</span><span class="p">)</span>
<span class="k">return</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">error_message</span> <span class="o">=</span> <span class="s1">&#39;</span><span class="si">{t}</span><span class="s1"> failed with terminal state: </span><span class="si">{s}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span>
<span class="n">t</span><span class="o">=</span><span class="n">operator</span><span class="o">.</span><span class="n">task_id</span><span class="p">,</span>
<span class="n">s</span><span class="o">=</span><span class="n">run_state</span><span class="p">)</span>
<span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="n">error_message</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;</span><span class="si">%s</span><span class="s1"> in run state: </span><span class="si">%s</span><span class="s1">&#39;</span><span class="p">,</span> <span class="n">operator</span><span class="o">.</span><span class="n">task_id</span><span class="p">,</span> <span class="n">run_state</span><span class="p">)</span>
<span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;View run status, Spark UI, and logs at </span><span class="si">%s</span><span class="s1">&#39;</span><span class="p">,</span> <span class="n">run_page_url</span><span class="p">)</span>
<span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;Sleeping for </span><span class="si">%s</span><span class="s1"> seconds.&#39;</span><span class="p">,</span> <span class="n">operator</span><span class="o">.</span><span class="n">polling_period_seconds</span><span class="p">)</span>
<span class="n">time</span><span class="o">.</span><span class="n">sleep</span><span class="p">(</span><span class="n">operator</span><span class="o">.</span><span class="n">polling_period_seconds</span><span class="p">)</span>
<div class="viewcode-block" id="DatabricksSubmitRunOperator"><a class="viewcode-back" href="../../../../integration.html#airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator">[docs]</a><span class="k">class</span> <span class="nc">DatabricksSubmitRunOperator</span><span class="p">(</span><span class="n">BaseOperator</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Submits a Spark job run to Databricks using the</span>
<span class="sd"> `api/2.0/jobs/runs/submit</span>
<span class="sd"> &lt;https://docs.databricks.com/api/latest/jobs.html#runs-submit&gt;`_</span>
<span class="sd"> API endpoint.</span>
<span class="sd"> There are two ways to instantiate this operator.</span>
<span class="sd"> In the first way, you can take the JSON payload that you typically use</span>
<span class="sd"> to call the ``api/2.0/jobs/runs/submit`` endpoint and pass it directly</span>
<span class="sd"> to our ``DatabricksSubmitRunOperator`` through the ``json`` parameter.</span>
<span class="sd"> For example ::</span>
<span class="sd"> json = {</span>
<span class="sd"> &#39;new_cluster&#39;: {</span>
<span class="sd"> &#39;spark_version&#39;: &#39;2.1.0-db3-scala2.11&#39;,</span>
<span class="sd"> &#39;num_workers&#39;: 2</span>
<span class="sd"> },</span>
<span class="sd"> &#39;notebook_task&#39;: {</span>
<span class="sd"> &#39;notebook_path&#39;: &#39;/Users/airflow@example.com/PrepareData&#39;,</span>
<span class="sd"> },</span>
<span class="sd"> }</span>
<span class="sd"> notebook_run = DatabricksSubmitRunOperator(task_id=&#39;notebook_run&#39;, json=json)</span>
<span class="sd"> Another way to accomplish the same thing is to use the named parameters</span>
<span class="sd"> of the ``DatabricksSubmitRunOperator`` directly. Note that there is exactly</span>
<span class="sd"> one named parameter for each top level parameter in the ``runs/submit``</span>
<span class="sd"> endpoint. In this method, your code would look like this: ::</span>
<span class="sd"> new_cluster = {</span>
<span class="sd"> &#39;spark_version&#39;: &#39;2.1.0-db3-scala2.11&#39;,</span>
<span class="sd"> &#39;num_workers&#39;: 2</span>
<span class="sd"> }</span>
<span class="sd"> notebook_task = {</span>
<span class="sd"> &#39;notebook_path&#39;: &#39;/Users/airflow@example.com/PrepareData&#39;,</span>
<span class="sd"> }</span>
<span class="sd"> notebook_run = DatabricksSubmitRunOperator(</span>
<span class="sd"> task_id=&#39;notebook_run&#39;,</span>
<span class="sd"> new_cluster=new_cluster,</span>
<span class="sd"> notebook_task=notebook_task)</span>
<span class="sd"> In the case where both the json parameter **AND** the named parameters</span>
<span class="sd"> are provided, they will be merged together. If there are conflicts during the merge,</span>
<span class="sd"> the named parameters will take precedence and override the top level ``json`` keys.</span>
<span class="sd"> Currently the named parameters that ``DatabricksSubmitRunOperator`` supports are</span>
<span class="sd"> - ``spark_jar_task``</span>
<span class="sd"> - ``notebook_task``</span>
<span class="sd"> - ``new_cluster``</span>
<span class="sd"> - ``existing_cluster_id``</span>
<span class="sd"> - ``libraries``</span>
<span class="sd"> - ``run_name``</span>
<span class="sd"> - ``timeout_seconds``</span>
<span class="sd"> :param json: A JSON object containing API parameters which will be passed</span>
<span class="sd"> directly to the ``api/2.0/jobs/runs/submit`` endpoint. The other named parameters</span>
<span class="sd"> (i.e. ``spark_jar_task``, ``notebook_task``..) to this operator will</span>
<span class="sd"> be merged with this json dictionary if they are provided.</span>
<span class="sd"> If there are conflicts during the merge, the named parameters will</span>
<span class="sd"> take precedence and override the top level json keys. (templated)</span>
<span class="sd"> .. seealso::</span>
<span class="sd"> For more information about templating see :ref:`jinja-templating`.</span>
<span class="sd"> https://docs.databricks.com/api/latest/jobs.html#runs-submit</span>
<span class="sd"> :type json: dict</span>
<span class="sd"> :param spark_jar_task: The main class and parameters for the JAR task. Note that</span>
<span class="sd"> the actual JAR is specified in the ``libraries``.</span>
<span class="sd"> *EITHER* ``spark_jar_task`` *OR* ``notebook_task`` should be specified.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> .. seealso::</span>
<span class="sd"> https://docs.databricks.com/api/latest/jobs.html#jobssparkjartask</span>
<span class="sd"> :type spark_jar_task: dict</span>
<span class="sd"> :param notebook_task: The notebook path and parameters for the notebook task.</span>
<span class="sd"> *EITHER* ``spark_jar_task`` *OR* ``notebook_task`` should be specified.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> .. seealso::</span>
<span class="sd"> https://docs.databricks.com/api/latest/jobs.html#jobsnotebooktask</span>
<span class="sd"> :type notebook_task: dict</span>
<span class="sd"> :param new_cluster: Specs for a new cluster on which this task will be run.</span>
<span class="sd"> *EITHER* ``new_cluster`` *OR* ``existing_cluster_id`` should be specified.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> .. seealso::</span>
<span class="sd"> https://docs.databricks.com/api/latest/jobs.html#jobsclusterspecnewcluster</span>
<span class="sd"> :type new_cluster: dict</span>
<span class="sd"> :param existing_cluster_id: ID for existing cluster on which to run this task.</span>
<span class="sd"> *EITHER* ``new_cluster`` *OR* ``existing_cluster_id`` should be specified.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> :type existing_cluster_id: string</span>
<span class="sd"> :param libraries: Libraries which this run will use.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> .. seealso::</span>
<span class="sd"> https://docs.databricks.com/api/latest/libraries.html#managedlibrarieslibrary</span>
<span class="sd"> :type libraries: list of dicts</span>
<span class="sd"> :param run_name: The run name used for this task.</span>
<span class="sd"> By default this will be set to the Airflow ``task_id``. This ``task_id`` is a</span>
<span class="sd"> required parameter of the superclass ``BaseOperator``.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> :type run_name: string</span>
<span class="sd"> :param timeout_seconds: The timeout for this run. By default a value of 0 is used</span>
<span class="sd"> which means to have no timeout.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> :type timeout_seconds: int32</span>
<span class="sd"> :param databricks_conn_id: The name of the Airflow connection to use.</span>
<span class="sd"> By default and in the common case this will be ``databricks_default``. To use</span>
<span class="sd"> token based authentication, provide the key ``token`` in the extra field for the</span>
<span class="sd"> connection.</span>
<span class="sd"> :type databricks_conn_id: string</span>
<span class="sd"> :param polling_period_seconds: Controls the rate which we poll for the result of</span>
<span class="sd"> this run. By default the operator will poll every 30 seconds.</span>
<span class="sd"> :type polling_period_seconds: int</span>
<span class="sd"> :param databricks_retry_limit: Amount of times retry if the Databricks backend is</span>
<span class="sd"> unreachable. Its value must be greater than or equal to 1.</span>
<span class="sd"> :type databricks_retry_limit: int</span>
<span class="sd"> :param databricks_retry_delay: Number of seconds to wait between retries (it</span>
<span class="sd"> might be a floating point number).</span>
<span class="sd"> :type databricks_retry_delay: float</span>
<span class="sd"> :param do_xcom_push: Whether we should push run_id and run_page_url to xcom.</span>
<span class="sd"> :type do_xcom_push: boolean</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># Used in airflow.models.BaseOperator</span>
<span class="n">template_fields</span> <span class="o">=</span> <span class="p">(</span><span class="s1">&#39;json&#39;</span><span class="p">,)</span>
<span class="c1"># Databricks brand color (blue) under white text</span>
<span class="n">ui_color</span> <span class="o">=</span> <span class="s1">&#39;#1CB1C2&#39;</span>
<span class="n">ui_fgcolor</span> <span class="o">=</span> <span class="s1">&#39;#fff&#39;</span>
<span class="nd">@apply_defaults</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span>
<span class="bp">self</span><span class="p">,</span>
<span class="n">json</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">spark_jar_task</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">notebook_task</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">new_cluster</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">existing_cluster_id</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">libraries</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">run_name</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">timeout_seconds</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">databricks_conn_id</span><span class="o">=</span><span class="s1">&#39;databricks_default&#39;</span><span class="p">,</span>
<span class="n">polling_period_seconds</span><span class="o">=</span><span class="mi">30</span><span class="p">,</span>
<span class="n">databricks_retry_limit</span><span class="o">=</span><span class="mi">3</span><span class="p">,</span>
<span class="n">databricks_retry_delay</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span>
<span class="n">do_xcom_push</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
<span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Creates a new ``DatabricksSubmitRunOperator``.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="nb">super</span><span class="p">(</span><span class="n">DatabricksSubmitRunOperator</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span> <span class="o">=</span> <span class="n">json</span> <span class="ow">or</span> <span class="p">{}</span>
<span class="bp">self</span><span class="o">.</span><span class="n">databricks_conn_id</span> <span class="o">=</span> <span class="n">databricks_conn_id</span>
<span class="bp">self</span><span class="o">.</span><span class="n">polling_period_seconds</span> <span class="o">=</span> <span class="n">polling_period_seconds</span>
<span class="bp">self</span><span class="o">.</span><span class="n">databricks_retry_limit</span> <span class="o">=</span> <span class="n">databricks_retry_limit</span>
<span class="bp">self</span><span class="o">.</span><span class="n">databricks_retry_delay</span> <span class="o">=</span> <span class="n">databricks_retry_delay</span>
<span class="k">if</span> <span class="n">spark_jar_task</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;spark_jar_task&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">spark_jar_task</span>
<span class="k">if</span> <span class="n">notebook_task</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;notebook_task&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">notebook_task</span>
<span class="k">if</span> <span class="n">new_cluster</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;new_cluster&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">new_cluster</span>
<span class="k">if</span> <span class="n">existing_cluster_id</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;existing_cluster_id&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">existing_cluster_id</span>
<span class="k">if</span> <span class="n">libraries</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;libraries&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">libraries</span>
<span class="k">if</span> <span class="n">run_name</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;run_name&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">run_name</span>
<span class="k">if</span> <span class="n">timeout_seconds</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;timeout_seconds&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">timeout_seconds</span>
<span class="k">if</span> <span class="s1">&#39;run_name&#39;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;run_name&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">run_name</span> <span class="ow">or</span> <span class="n">kwargs</span><span class="p">[</span><span class="s1">&#39;task_id&#39;</span><span class="p">]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span> <span class="o">=</span> <span class="n">_deep_string_coerce</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">)</span>
<span class="c1"># This variable will be used in case our task gets killed.</span>
<span class="bp">self</span><span class="o">.</span><span class="n">run_id</span> <span class="o">=</span> <span class="kc">None</span>
<span class="bp">self</span><span class="o">.</span><span class="n">do_xcom_push</span> <span class="o">=</span> <span class="n">do_xcom_push</span>
<span class="k">def</span> <span class="nf">get_hook</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">return</span> <span class="n">DatabricksHook</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">databricks_conn_id</span><span class="p">,</span>
<span class="n">retry_limit</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">databricks_retry_limit</span><span class="p">,</span>
<span class="n">retry_delay</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">databricks_retry_delay</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">execute</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">context</span><span class="p">):</span>
<span class="n">hook</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_hook</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">run_id</span> <span class="o">=</span> <span class="n">hook</span><span class="o">.</span><span class="n">submit_run</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">)</span>
<span class="n">_handle_databricks_operator_execution</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hook</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="p">,</span> <span class="n">context</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">on_kill</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="n">hook</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_hook</span><span class="p">()</span>
<span class="n">hook</span><span class="o">.</span><span class="n">cancel_run</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">run_id</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span>
<span class="s1">&#39;Task: </span><span class="si">%s</span><span class="s1"> with run_id: </span><span class="si">%s</span><span class="s1"> was requested to be cancelled.&#39;</span><span class="p">,</span>
<span class="bp">self</span><span class="o">.</span><span class="n">task_id</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">run_id</span>
<span class="p">)</span></div>
<span class="k">class</span> <span class="nc">DatabricksRunNowOperator</span><span class="p">(</span><span class="n">BaseOperator</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Runs an existing Spark job run to Databricks using the</span>
<span class="sd"> `api/2.0/jobs/run-now</span>
<span class="sd"> &lt;https://docs.databricks.com/api/latest/jobs.html#run-now&gt;`_</span>
<span class="sd"> API endpoint.</span>
<span class="sd"> There are two ways to instantiate this operator.</span>
<span class="sd"> In the first way, you can take the JSON payload that you typically use</span>
<span class="sd"> to call the ``api/2.0/jobs/run-now`` endpoint and pass it directly</span>
<span class="sd"> to our ``DatabricksRunNowOperator`` through the ``json`` parameter.</span>
<span class="sd"> For example ::</span>
<span class="sd"> json = {</span>
<span class="sd"> &quot;job_id&quot;: 42,</span>
<span class="sd"> &quot;notebook_params&quot;: {</span>
<span class="sd"> &quot;dry-run&quot;: &quot;true&quot;,</span>
<span class="sd"> &quot;oldest-time-to-consider&quot;: &quot;1457570074236&quot;</span>
<span class="sd"> }</span>
<span class="sd"> }</span>
<span class="sd"> notebook_run = DatabricksRunNowOperator(task_id=&#39;notebook_run&#39;, json=json)</span>
<span class="sd"> Another way to accomplish the same thing is to use the named parameters</span>
<span class="sd"> of the ``DatabricksRunNowOperator`` directly. Note that there is exactly</span>
<span class="sd"> one named parameter for each top level parameter in the ``run-now``</span>
<span class="sd"> endpoint. In this method, your code would look like this: ::</span>
<span class="sd"> job_id=42</span>
<span class="sd"> notebook_params = {</span>
<span class="sd"> &quot;dry-run&quot;: &quot;true&quot;,</span>
<span class="sd"> &quot;oldest-time-to-consider&quot;: &quot;1457570074236&quot;</span>
<span class="sd"> }</span>
<span class="sd"> python_params = [&quot;douglas adams&quot;, &quot;42&quot;]</span>
<span class="sd"> spark_submit_params = [&quot;--class&quot;, &quot;org.apache.spark.examples.SparkPi&quot;]</span>
<span class="sd"> notebook_run = DatabricksRunNowOperator(</span>
<span class="sd"> job_id=job_id,</span>
<span class="sd"> notebook_params=notebook_params,</span>
<span class="sd"> python_params=python_params,</span>
<span class="sd"> spark_submit_params=spark_submit_params</span>
<span class="sd"> )</span>
<span class="sd"> In the case where both the json parameter **AND** the named parameters</span>
<span class="sd"> are provided, they will be merged together. If there are conflicts during the merge,</span>
<span class="sd"> the named parameters will take precedence and override the top level ``json`` keys.</span>
<span class="sd"> Currently the named parameters that ``DatabricksRunNowOperator`` supports are</span>
<span class="sd"> - ``job_id``</span>
<span class="sd"> - ``json``</span>
<span class="sd"> - ``notebook_params``</span>
<span class="sd"> - ``python_params``</span>
<span class="sd"> - ``spark_submit_params``</span>
<span class="sd"> :param job_id: the job_id of the existing Databricks job.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> .. seealso::</span>
<span class="sd"> https://docs.databricks.com/api/latest/jobs.html#run-now</span>
<span class="sd"> :type job_id: string</span>
<span class="sd"> :param json: A JSON object containing API parameters which will be passed</span>
<span class="sd"> directly to the ``api/2.0/jobs/run-now`` endpoint. The other named parameters</span>
<span class="sd"> (i.e. ``notebook_params``, ``spark_submit_params``..) to this operator will</span>
<span class="sd"> be merged with this json dictionary if they are provided.</span>
<span class="sd"> If there are conflicts during the merge, the named parameters will</span>
<span class="sd"> take precedence and override the top level json keys. (templated)</span>
<span class="sd"> .. seealso::</span>
<span class="sd"> For more information about templating see :ref:`jinja-templating`.</span>
<span class="sd"> https://docs.databricks.com/api/latest/jobs.html#run-now</span>
<span class="sd"> :type json: dict</span>
<span class="sd"> :param notebook_params: A dict from keys to values for jobs with notebook task,</span>
<span class="sd"> e.g. &quot;notebook_params&quot;: {&quot;name&quot;: &quot;john doe&quot;, &quot;age&quot;: &quot;35&quot;}.</span>
<span class="sd"> The map is passed to the notebook and will be accessible through the</span>
<span class="sd"> dbutils.widgets.get function. See Widgets for more information.</span>
<span class="sd"> If not specified upon run-now, the triggered run will use the</span>
<span class="sd"> job’s base parameters. notebook_params cannot be</span>
<span class="sd"> specified in conjunction with jar_params. The json representation</span>
<span class="sd"> of this field (i.e. {&quot;notebook_params&quot;:{&quot;name&quot;:&quot;john doe&quot;,&quot;age&quot;:&quot;35&quot;}})</span>
<span class="sd"> cannot exceed 10,000 bytes.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> .. seealso::</span>
<span class="sd"> https://docs.databricks.com/user-guide/notebooks/widgets.html</span>
<span class="sd"> :type notebook_params: dict</span>
<span class="sd"> :param python_params: A list of parameters for jobs with python tasks,</span>
<span class="sd"> e.g. &quot;python_params&quot;: [&quot;john doe&quot;, &quot;35&quot;].</span>
<span class="sd"> The parameters will be passed to python file as command line parameters.</span>
<span class="sd"> If specified upon run-now, it would overwrite the parameters specified in</span>
<span class="sd"> job setting.</span>
<span class="sd"> The json representation of this field (i.e. {&quot;python_params&quot;:[&quot;john doe&quot;,&quot;35&quot;]})</span>
<span class="sd"> cannot exceed 10,000 bytes.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> .. seealso::</span>
<span class="sd"> https://docs.databricks.com/api/latest/jobs.html#run-now</span>
<span class="sd"> :type python_params: array of strings</span>
<span class="sd"> :param spark_submit_params: A list of parameters for jobs with spark submit task,</span>
<span class="sd"> e.g. &quot;spark_submit_params&quot;: [&quot;--class&quot;, &quot;org.apache.spark.examples.SparkPi&quot;].</span>
<span class="sd"> The parameters will be passed to spark-submit script as command line parameters.</span>
<span class="sd"> If specified upon run-now, it would overwrite the parameters specified</span>
<span class="sd"> in job setting.</span>
<span class="sd"> The json representation of this field cannot exceed 10,000 bytes.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> .. seealso::</span>
<span class="sd"> https://docs.databricks.com/api/latest/jobs.html#run-now</span>
<span class="sd"> :type spark_submit_params: array of strings</span>
<span class="sd"> :param timeout_seconds: The timeout for this run. By default a value of 0 is used</span>
<span class="sd"> which means to have no timeout.</span>
<span class="sd"> This field will be templated.</span>
<span class="sd"> :type timeout_seconds: int32</span>
<span class="sd"> :param databricks_conn_id: The name of the Airflow connection to use.</span>
<span class="sd"> By default and in the common case this will be ``databricks_default``. To use</span>
<span class="sd"> token based authentication, provide the key ``token`` in the extra field for the</span>
<span class="sd"> connection.</span>
<span class="sd"> :type databricks_conn_id: string</span>
<span class="sd"> :param polling_period_seconds: Controls the rate which we poll for the result of</span>
<span class="sd"> this run. By default the operator will poll every 30 seconds.</span>
<span class="sd"> :type polling_period_seconds: int</span>
<span class="sd"> :param databricks_retry_limit: Amount of times retry if the Databricks backend is</span>
<span class="sd"> unreachable. Its value must be greater than or equal to 1.</span>
<span class="sd"> :type databricks_retry_limit: int</span>
<span class="sd"> :param do_xcom_push: Whether we should push run_id and run_page_url to xcom.</span>
<span class="sd"> :type do_xcom_push: boolean</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># Used in airflow.models.BaseOperator</span>
<span class="n">template_fields</span> <span class="o">=</span> <span class="p">(</span><span class="s1">&#39;json&#39;</span><span class="p">,)</span>
<span class="c1"># Databricks brand color (blue) under white text</span>
<span class="n">ui_color</span> <span class="o">=</span> <span class="s1">&#39;#1CB1C2&#39;</span>
<span class="n">ui_fgcolor</span> <span class="o">=</span> <span class="s1">&#39;#fff&#39;</span>
<span class="nd">@apply_defaults</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span>
<span class="bp">self</span><span class="p">,</span>
<span class="n">job_id</span><span class="p">,</span>
<span class="n">json</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">notebook_params</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">python_params</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">spark_submit_params</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">databricks_conn_id</span><span class="o">=</span><span class="s1">&#39;databricks_default&#39;</span><span class="p">,</span>
<span class="n">polling_period_seconds</span><span class="o">=</span><span class="mi">30</span><span class="p">,</span>
<span class="n">databricks_retry_limit</span><span class="o">=</span><span class="mi">3</span><span class="p">,</span>
<span class="n">databricks_retry_delay</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span>
<span class="n">do_xcom_push</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
<span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Creates a new ``DatabricksRunNowOperator``.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="nb">super</span><span class="p">(</span><span class="n">DatabricksRunNowOperator</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span> <span class="o">=</span> <span class="n">json</span> <span class="ow">or</span> <span class="p">{}</span>
<span class="bp">self</span><span class="o">.</span><span class="n">databricks_conn_id</span> <span class="o">=</span> <span class="n">databricks_conn_id</span>
<span class="bp">self</span><span class="o">.</span><span class="n">polling_period_seconds</span> <span class="o">=</span> <span class="n">polling_period_seconds</span>
<span class="bp">self</span><span class="o">.</span><span class="n">databricks_retry_limit</span> <span class="o">=</span> <span class="n">databricks_retry_limit</span>
<span class="bp">self</span><span class="o">.</span><span class="n">databricks_retry_delay</span> <span class="o">=</span> <span class="n">databricks_retry_delay</span>
<span class="k">if</span> <span class="n">job_id</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;job_id&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">job_id</span>
<span class="k">if</span> <span class="n">notebook_params</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;notebook_params&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">notebook_params</span>
<span class="k">if</span> <span class="n">python_params</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;python_params&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">python_params</span>
<span class="k">if</span> <span class="n">spark_submit_params</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">[</span><span class="s1">&#39;spark_submit_params&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">spark_submit_params</span>
<span class="bp">self</span><span class="o">.</span><span class="n">json</span> <span class="o">=</span> <span class="n">_deep_string_coerce</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">)</span>
<span class="c1"># This variable will be used in case our task gets killed.</span>
<span class="bp">self</span><span class="o">.</span><span class="n">run_id</span> <span class="o">=</span> <span class="kc">None</span>
<span class="bp">self</span><span class="o">.</span><span class="n">do_xcom_push</span> <span class="o">=</span> <span class="n">do_xcom_push</span>
<span class="k">def</span> <span class="nf">get_hook</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">return</span> <span class="n">DatabricksHook</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">databricks_conn_id</span><span class="p">,</span>
<span class="n">retry_limit</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">databricks_retry_limit</span><span class="p">,</span>
<span class="n">retry_delay</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">databricks_retry_delay</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">execute</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">context</span><span class="p">):</span>
<span class="n">hook</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_hook</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">run_id</span> <span class="o">=</span> <span class="n">hook</span><span class="o">.</span><span class="n">run_now</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">json</span><span class="p">)</span>
<span class="n">_handle_databricks_operator_execution</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hook</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="p">,</span> <span class="n">context</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">on_kill</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="n">hook</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_hook</span><span class="p">()</span>
<span class="n">hook</span><span class="o">.</span><span class="n">cancel_run</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">run_id</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span>
<span class="s1">&#39;Task: </span><span class="si">%s</span><span class="s1"> with run_id: </span><span class="si">%s</span><span class="s1"> was requested to be cancelled.&#39;</span><span class="p">,</span>
<span class="bp">self</span><span class="o">.</span><span class="n">task_id</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">run_id</span>
<span class="p">)</span>