blob: 44e00fb11f6c480323788a5b8e3a7cf2263d1b19 [file] [log] [blame]
<!--
Javascript to render AIRFLOW-XXX and PR references in text
as HTML links.
Overrides extrahead block from sphinx_rtd_theme
https://www.sphinx-doc.org/en/master/templating.html
-->
<!DOCTYPE html>
<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>airflow.contrib.hooks.spark_submit_hook &mdash; Airflow Documentation</title>
<link rel="shortcut icon" href="../../../../_static/pin_32.png"/>
<script type="text/javascript" src="../../../../_static/js/modernizr.min.js"></script>
<script type="text/javascript" id="documentation_options" data-url_root="../../../../" src="../../../../_static/documentation_options.js"></script>
<script type="text/javascript" src="../../../../_static/jquery.js"></script>
<script type="text/javascript" src="../../../../_static/underscore.js"></script>
<script type="text/javascript" src="../../../../_static/doctools.js"></script>
<script type="text/javascript" src="../../../../_static/language_data.js"></script>
<script type="text/javascript" src="../../../../_static/js/theme.js"></script>
<link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" />
<link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" />
<link rel="stylesheet" href="../../../../_static/graphviz.css" type="text/css" />
<link rel="index" title="Index" href="../../../../genindex.html" />
<link rel="search" title="Search" href="../../../../search.html" />
<script>
document.addEventListener('DOMContentLoaded', function() {
var el = document.getElementById('changelog');
if (el !== null ) {
// [AIRFLOW-...]
el.innerHTML = el.innerHTML.replace(
/\[(AIRFLOW-[\d]+)\]/g,
`<a href="https://issues.apache.org/jira/browse/$1">[$1]</a>`
);
// (#...)
el.innerHTML = el.innerHTML.replace(
/\(#([\d]+)\)/g,
`<a href="https://github.com/apache/airflow/pull/$1">(#$1)</a>`
);
};
})
</script>
<script type="text/javascript">
// Legacy Google Analytics (ga.js) command queue for the Airflow docs site.
// Commands are queued here and consumed by the ga.js loader, which is not
// visible in this chunk — presumably injected by the theme or a later
// script; verify it is still included, otherwise these pushes are no-ops.
var _gaq = _gaq || [];
// Property ID for the Airflow documentation site.
_gaq.push(['_setAccount', 'UA-140539454-1']);
// Record a pageview for the current URL.
_gaq.push(['_trackPageview']);
</script>
<style>
/* Header bar shown above example code listings. */
.example-header {
  position: relative;
  background: #9AAA7A;
  padding: 8px 16px;
  margin-bottom: 0;
}
/* Variant that reserves room on the right for the view-source button. */
.example-header--with-button {
  padding-right: 166px;
}
/* Clearfix so floated children don't collapse the header. */
.example-header:after {
  content: '';
  display: table;
  clear: both;
}
.example-title {
  display: block;
  padding: 4px;
  margin-right: 16px;
  color: white;
  overflow-x: auto;
}
/* Button pinned to the top-right corner of the example header. */
.example-header-button {
  top: 8px;
  right: 16px;
  position: absolute;
}
/* Glue the following code block directly to the header. */
.example-header + .highlight-python {
  margin-top: 0 !important;
}
/* "View Source" style button used in the docs. */
.viewcode-button {
  display: inline-block;
  padding: 8px 16px;
  border: 0;
  margin: 0;
  outline: 0;
  border-radius: 2px;
  /* Prefixed fallback kept in sync with the standard declaration below
     (was 5px blur vs 6px, causing a subtle rendering difference in old
     WebKit browsers). */
  -webkit-box-shadow: 0 3px 6px 0 rgba(0,0,0,.3);
  box-shadow: 0 3px 6px 0 rgba(0,0,0,.3);
  color: #404040;
  background-color: #e7e7e7;
  cursor: pointer;
  font-size: 16px;
  font-weight: 500;
  line-height: 1;
  text-decoration: none;
  text-overflow: ellipsis;
  overflow: hidden;
  text-transform: uppercase;
  -webkit-transition: background-color .2s;
  transition: background-color .2s;
  vertical-align: middle;
  white-space: nowrap;
}
.viewcode-button:visited {
  color: #404040;
}
.viewcode-button:hover, .viewcode-button:focus {
  color: #404040;
  background-color: #d6d6d6;
}
</style>
</head>
<body class="wy-body-for-nav">
<div class="wy-grid-for-nav">
<nav data-toggle="wy-nav-shift" class="wy-nav-side">
<div class="wy-side-scroll">
<div class="wy-side-nav-search" >
<a href="../../../../index.html" class="icon icon-home"> Airflow
</a>
<div class="version">
1.10.6
</div>
<div role="search">
<form id="rtd-search-form" class="wy-form" action="../../../../search.html" method="get">
<input type="text" name="q" placeholder="Search docs" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
</div>
<div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../../../project.html">Project</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../license.html">License</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../start.html">Quick Start</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../installation.html">Installation</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../tutorial.html">Tutorial</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../howto/index.html">How-to Guides</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../ui.html">UI / Screenshots</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../concepts.html">Concepts</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../profiling.html">Data Profiling</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../cli.html">Command Line Interface Reference</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../scheduler.html">Scheduling &amp; Triggers</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../plugins.html">Plugins</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../security.html">Security</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../timezone.html">Time zones</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../api.html">REST API Reference</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../integration.html">Integration</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../metrics.html">Metrics</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../errors.html">Error Tracking</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../kubernetes.html">Kubernetes</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../lineage.html">Lineage</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../changelog.html">Changelog</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../faq.html">FAQ</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../macros.html">Macros reference</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../_api/index.html">Python API Reference</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../privacy_notice.html">Privacy Notice</a></li>
</ul>
<p class="caption"><span class="caption-text">References</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../../../_api/index.html">Python API</a></li>
</ul>
</div>
</div>
</nav>
<section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
<nav class="wy-nav-top" aria-label="top navigation">
<i data-toggle="wy-nav-top" class="fa fa-bars"></i>
<a href="../../../../index.html">Airflow</a>
</nav>
<div class="wy-nav-content">
<div class="rst-content">
<div role="navigation" aria-label="breadcrumbs navigation">
<ul class="wy-breadcrumbs">
<li><a href="../../../../index.html">Docs</a> &raquo;</li>
<li><a href="../../../index.html">Module code</a> &raquo;</li>
<li><a href="../hooks.html">airflow.contrib.hooks</a> &raquo;</li>
<li>airflow.contrib.hooks.spark_submit_hook</li>
<li class="wy-breadcrumbs-aside">
</li>
</ul>
<hr/>
</div>
<div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
<div itemprop="articleBody">
<h1>Source code for airflow.contrib.hooks.spark_submit_hook</h1><div class="highlight"><pre>
<span></span><span class="c1"># -*- coding: utf-8 -*-</span>
<span class="c1">#</span>
<span class="c1"># Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="c1"># or more contributor license agreements. See the NOTICE file</span>
<span class="c1"># distributed with this work for additional information</span>
<span class="c1"># regarding copyright ownership. The ASF licenses this file</span>
<span class="c1"># to you under the Apache License, Version 2.0 (the</span>
<span class="c1"># &quot;License&quot;); you may not use this file except in compliance</span>
<span class="c1"># with the License. You may obtain a copy of the License at</span>
<span class="c1">#</span>
<span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="c1">#</span>
<span class="c1"># Unless required by applicable law or agreed to in writing,</span>
<span class="c1"># software distributed under the License is distributed on an</span>
<span class="c1"># &quot;AS IS&quot; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY</span>
<span class="c1"># KIND, either express or implied. See the License for the</span>
<span class="c1"># specific language governing permissions and limitations</span>
<span class="c1"># under the License.</span>
<span class="c1">#</span>
<span class="kn">import</span> <span class="nn">os</span>
<span class="kn">import</span> <span class="nn">subprocess</span>
<span class="kn">import</span> <span class="nn">re</span>
<span class="kn">import</span> <span class="nn">time</span>
<span class="kn">from</span> <span class="nn">airflow.hooks.base_hook</span> <span class="k">import</span> <span class="n">BaseHook</span>
<span class="kn">from</span> <span class="nn">airflow.exceptions</span> <span class="k">import</span> <span class="n">AirflowException</span>
<span class="kn">from</span> <span class="nn">airflow.utils.log.logging_mixin</span> <span class="k">import</span> <span class="n">LoggingMixin</span>
<span class="kn">from</span> <span class="nn">airflow.contrib.kubernetes</span> <span class="k">import</span> <span class="n">kube_client</span>
<div class="viewcode-block" id="SparkSubmitHook"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook">[docs]</a><span class="k">class</span> <span class="nc">SparkSubmitHook</span><span class="p">(</span><span class="n">BaseHook</span><span class="p">,</span> <span class="n">LoggingMixin</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> This hook is a wrapper around the spark-submit binary to kick off a spark-submit job.</span>
<span class="sd"> It requires that the &quot;spark-submit&quot; binary is in the PATH or the spark_home to be</span>
<span class="sd"> supplied.</span>
<span class="sd"> :param conf: Arbitrary Spark configuration properties</span>
<span class="sd"> :type conf: dict</span>
<span class="sd"> :param conn_id: The connection id as configured in Airflow administration. When an</span>
<span class="sd"> invalid connection_id is supplied, it will default to yarn.</span>
<span class="sd"> :type conn_id: str</span>
<span class="sd"> :param files: Upload additional files to the executor running the job, separated by a</span>
<span class="sd"> comma. Files will be placed in the working directory of each executor.</span>
<span class="sd"> For example, serialized objects.</span>
<span class="sd"> :type files: str</span>
<span class="sd"> :param py_files: Additional python files used by the job, can be .zip, .egg or .py.</span>
<span class="sd"> :type py_files: str</span>
<span class="sd"> :param: archives: Archives that spark should unzip (and possibly tag with #ALIAS) into</span>
<span class="sd"> the application working directory.</span>
<span class="sd"> :param driver_class_path: Additional, driver-specific, classpath settings.</span>
<span class="sd"> :type driver_class_path: str</span>
<span class="sd"> :param jars: Submit additional jars to upload and place them in executor classpath.</span>
<span class="sd"> :type jars: str</span>
<span class="sd"> :param java_class: the main class of the Java application</span>
<span class="sd"> :type java_class: str</span>
<span class="sd"> :param packages: Comma-separated list of maven coordinates of jars to include on the</span>
<span class="sd"> driver and executor classpaths</span>
<span class="sd"> :type packages: str</span>
<span class="sd"> :param exclude_packages: Comma-separated list of maven coordinates of jars to exclude</span>
<span class="sd"> while resolving the dependencies provided in &#39;packages&#39;</span>
<span class="sd"> :type exclude_packages: str</span>
<span class="sd"> :param repositories: Comma-separated list of additional remote repositories to search</span>
<span class="sd"> for the maven coordinates given with &#39;packages&#39;</span>
<span class="sd"> :type repositories: str</span>
<span class="sd"> :param total_executor_cores: (Standalone &amp; Mesos only) Total cores for all executors</span>
<span class="sd"> (Default: all the available cores on the worker)</span>
<span class="sd"> :type total_executor_cores: int</span>
<span class="sd"> :param executor_cores: (Standalone, YARN and Kubernetes only) Number of cores per</span>
<span class="sd"> executor (Default: 2)</span>
<span class="sd"> :type executor_cores: int</span>
<span class="sd"> :param executor_memory: Memory per executor (e.g. 1000M, 2G) (Default: 1G)</span>
<span class="sd"> :type executor_memory: str</span>
<span class="sd"> :param driver_memory: Memory allocated to the driver (e.g. 1000M, 2G) (Default: 1G)</span>
<span class="sd"> :type driver_memory: str</span>
<span class="sd"> :param keytab: Full path to the file that contains the keytab</span>
<span class="sd"> :type keytab: str</span>
<span class="sd"> :param principal: The name of the kerberos principal used for keytab</span>
<span class="sd"> :type principal: str</span>
<span class="sd"> :param proxy_user: User to impersonate when submitting the application</span>
<span class="sd"> :type proxy_user: str</span>
<span class="sd"> :param name: Name of the job (default airflow-spark)</span>
<span class="sd"> :type name: str</span>
<span class="sd"> :param num_executors: Number of executors to launch</span>
<span class="sd"> :type num_executors: int</span>
<span class="sd"> :param application_args: Arguments for the application being submitted</span>
<span class="sd"> :type application_args: list</span>
<span class="sd"> :param env_vars: Environment variables for spark-submit. It</span>
<span class="sd"> supports yarn and k8s mode too.</span>
<span class="sd"> :type env_vars: dict</span>
<span class="sd"> :param verbose: Whether to pass the verbose flag to spark-submit process for debugging</span>
<span class="sd"> :type verbose: bool</span>
<span class="sd"> :param spark_binary: The command to use for spark submit.</span>
<span class="sd"> Some distros may use spark2-submit.</span>
<span class="sd"> :type spark_binary: str</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span>
<span class="n">conf</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">conn_id</span><span class="o">=</span><span class="s1">&#39;spark_default&#39;</span><span class="p">,</span>
<span class="n">files</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">py_files</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">archives</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">driver_class_path</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">jars</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">java_class</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">packages</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">exclude_packages</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">repositories</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">total_executor_cores</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">executor_cores</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">executor_memory</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">driver_memory</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">keytab</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">principal</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">proxy_user</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">name</span><span class="o">=</span><span class="s1">&#39;default-name&#39;</span><span class="p">,</span>
<span class="n">num_executors</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">application_args</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">env_vars</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">verbose</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
<span class="n">spark_binary</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_conf</span> <span class="o">=</span> <span class="n">conf</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_conn_id</span> <span class="o">=</span> <span class="n">conn_id</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_files</span> <span class="o">=</span> <span class="n">files</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_py_files</span> <span class="o">=</span> <span class="n">py_files</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_archives</span> <span class="o">=</span> <span class="n">archives</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_driver_class_path</span> <span class="o">=</span> <span class="n">driver_class_path</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_jars</span> <span class="o">=</span> <span class="n">jars</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_java_class</span> <span class="o">=</span> <span class="n">java_class</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_packages</span> <span class="o">=</span> <span class="n">packages</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_exclude_packages</span> <span class="o">=</span> <span class="n">exclude_packages</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_repositories</span> <span class="o">=</span> <span class="n">repositories</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_total_executor_cores</span> <span class="o">=</span> <span class="n">total_executor_cores</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_executor_cores</span> <span class="o">=</span> <span class="n">executor_cores</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_executor_memory</span> <span class="o">=</span> <span class="n">executor_memory</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_driver_memory</span> <span class="o">=</span> <span class="n">driver_memory</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_keytab</span> <span class="o">=</span> <span class="n">keytab</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_principal</span> <span class="o">=</span> <span class="n">principal</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_proxy_user</span> <span class="o">=</span> <span class="n">proxy_user</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_name</span> <span class="o">=</span> <span class="n">name</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_num_executors</span> <span class="o">=</span> <span class="n">num_executors</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_application_args</span> <span class="o">=</span> <span class="n">application_args</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_env_vars</span> <span class="o">=</span> <span class="n">env_vars</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_verbose</span> <span class="o">=</span> <span class="n">verbose</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_submit_sp</span> <span class="o">=</span> <span class="kc">None</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_yarn_application_id</span> <span class="o">=</span> <span class="kc">None</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_kubernetes_driver_pod</span> <span class="o">=</span> <span class="kc">None</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_spark_binary</span> <span class="o">=</span> <span class="n">spark_binary</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_connection</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_resolve_connection</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_is_yarn</span> <span class="o">=</span> <span class="s1">&#39;yarn&#39;</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;master&#39;</span><span class="p">]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_is_kubernetes</span> <span class="o">=</span> <span class="s1">&#39;k8s&#39;</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;master&#39;</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_is_kubernetes</span> <span class="ow">and</span> <span class="n">kube_client</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span>
<span class="s2">&quot;</span><span class="si">{}</span><span class="s2"> specified by kubernetes dependencies are not installed!&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;master&#39;</span><span class="p">]))</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_should_track_driver_status</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_resolve_should_track_driver_status</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span> <span class="o">=</span> <span class="kc">None</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_driver_status</span> <span class="o">=</span> <span class="kc">None</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_spark_exit_code</span> <span class="o">=</span> <span class="kc">None</span>
<div class="viewcode-block" id="SparkSubmitHook._resolve_should_track_driver_status"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook._resolve_should_track_driver_status">[docs]</a> <span class="k">def</span> <span class="nf">_resolve_should_track_driver_status</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Determines whether or not this hook should poll the spark driver status through</span>
<span class="sd"> subsequent spark-submit status requests after the initial spark-submit request</span>
<span class="sd"> :return: if the driver status should be tracked</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="p">(</span><span class="s1">&#39;spark://&#39;</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;master&#39;</span><span class="p">]</span> <span class="ow">and</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;deploy_mode&#39;</span><span class="p">]</span> <span class="o">==</span> <span class="s1">&#39;cluster&#39;</span><span class="p">)</span></div>
<div class="viewcode-block" id="SparkSubmitHook._resolve_connection"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook._resolve_connection">[docs]</a> <span class="k">def</span> <span class="nf">_resolve_connection</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="c1"># Build from connection master or default to yarn if not available</span>
<span class="n">conn_data</span> <span class="o">=</span> <span class="p">{</span><span class="s1">&#39;master&#39;</span><span class="p">:</span> <span class="s1">&#39;yarn&#39;</span><span class="p">,</span>
<span class="s1">&#39;queue&#39;</span><span class="p">:</span> <span class="kc">None</span><span class="p">,</span>
<span class="s1">&#39;deploy_mode&#39;</span><span class="p">:</span> <span class="kc">None</span><span class="p">,</span>
<span class="s1">&#39;spark_home&#39;</span><span class="p">:</span> <span class="kc">None</span><span class="p">,</span>
<span class="s1">&#39;spark_binary&#39;</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">_spark_binary</span> <span class="ow">or</span> <span class="s2">&quot;spark-submit&quot;</span><span class="p">,</span>
<span class="s1">&#39;namespace&#39;</span><span class="p">:</span> <span class="s1">&#39;default&#39;</span><span class="p">}</span>
<span class="k">try</span><span class="p">:</span>
<span class="c1"># Master can be local, yarn, spark://HOST:PORT, mesos://HOST:PORT and</span>
<span class="c1"># k8s://https://&lt;HOST&gt;:&lt;PORT&gt;</span>
<span class="n">conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_connection</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_conn_id</span><span class="p">)</span>
<span class="k">if</span> <span class="n">conn</span><span class="o">.</span><span class="n">port</span><span class="p">:</span>
<span class="n">conn_data</span><span class="p">[</span><span class="s1">&#39;master&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="s2">&quot;</span><span class="si">{}</span><span class="s2">:</span><span class="si">{}</span><span class="s2">&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">conn</span><span class="o">.</span><span class="n">host</span><span class="p">,</span> <span class="n">conn</span><span class="o">.</span><span class="n">port</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">conn_data</span><span class="p">[</span><span class="s1">&#39;master&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">host</span>
<span class="c1"># Determine optional yarn queue from the extra field</span>
<span class="n">extra</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span>
<span class="n">conn_data</span><span class="p">[</span><span class="s1">&#39;queue&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">extra</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;queue&#39;</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span>
<span class="n">conn_data</span><span class="p">[</span><span class="s1">&#39;deploy_mode&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">extra</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;deploy-mode&#39;</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span>
<span class="n">conn_data</span><span class="p">[</span><span class="s1">&#39;spark_home&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">extra</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;spark-home&#39;</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span>
<span class="n">conn_data</span><span class="p">[</span><span class="s1">&#39;spark_binary&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_spark_binary</span> <span class="ow">or</span> \
<span class="n">extra</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;spark-binary&#39;</span><span class="p">,</span> <span class="s2">&quot;spark-submit&quot;</span><span class="p">)</span>
<span class="n">conn_data</span><span class="p">[</span><span class="s1">&#39;namespace&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">extra</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;namespace&#39;</span><span class="p">,</span> <span class="s1">&#39;default&#39;</span><span class="p">)</span>
<span class="k">except</span> <span class="n">AirflowException</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span>
<span class="s2">&quot;Could not load connection string </span><span class="si">%s</span><span class="s2">, defaulting to </span><span class="si">%s</span><span class="s2">&quot;</span><span class="p">,</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_conn_id</span><span class="p">,</span> <span class="n">conn_data</span><span class="p">[</span><span class="s1">&#39;master&#39;</span><span class="p">]</span>
<span class="p">)</span>
<span class="k">return</span> <span class="n">conn_data</span></div>
<div class="viewcode-block" id="SparkSubmitHook.get_conn"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook.get_conn">[docs]</a> <span class="k">def</span> <span class="nf">get_conn</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">pass</span></div>
<div class="viewcode-block" id="SparkSubmitHook._get_spark_binary_path"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook._get_spark_binary_path">[docs]</a> <span class="k">def</span> <span class="nf">_get_spark_binary_path</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="c1"># If the spark_home is passed then build the spark-submit executable path using</span>
<span class="c1"># the spark_home; otherwise assume that spark-submit is present in the path to</span>
<span class="c1"># the executing user</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;spark_home&#39;</span><span class="p">]:</span>
<span class="n">connection_cmd</span> <span class="o">=</span> <span class="p">[</span><span class="n">os</span><span class="o">.</span><span class="n">path</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;spark_home&#39;</span><span class="p">],</span> <span class="s1">&#39;bin&#39;</span><span class="p">,</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;spark_binary&#39;</span><span class="p">])]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">=</span> <span class="p">[</span><span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;spark_binary&#39;</span><span class="p">]]</span>
<span class="k">return</span> <span class="n">connection_cmd</span></div>
<div class="viewcode-block" id="SparkSubmitHook._build_spark_submit_command"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook._build_spark_submit_command">[docs]</a> <span class="k">def</span> <span class="nf">_build_spark_submit_command</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">application</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Construct the spark-submit command to execute.</span>
<span class="sd"> :param application: command to append to the spark-submit command</span>
<span class="sd"> :type application: str</span>
<span class="sd"> :return: full command to be executed</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">connection_cmd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_spark_binary_path</span><span class="p">()</span>
<span class="c1"># The url of the spark master</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--master&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;master&#39;</span><span class="p">]]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_conf</span><span class="p">:</span>
<span class="k">for</span> <span class="n">key</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">_conf</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--conf&quot;</span><span class="p">,</span> <span class="s2">&quot;</span><span class="si">{}</span><span class="s2">=</span><span class="si">{}</span><span class="s2">&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_conf</span><span class="p">[</span><span class="n">key</span><span class="p">]))]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_env_vars</span> <span class="ow">and</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_is_kubernetes</span> <span class="ow">or</span> <span class="bp">self</span><span class="o">.</span><span class="n">_is_yarn</span><span class="p">):</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_is_yarn</span><span class="p">:</span>
<span class="n">tmpl</span> <span class="o">=</span> <span class="s2">&quot;spark.yarn.appMasterEnv.</span><span class="si">{}</span><span class="s2">=</span><span class="si">{}</span><span class="s2">&quot;</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">tmpl</span> <span class="o">=</span> <span class="s2">&quot;spark.kubernetes.driverEnv.</span><span class="si">{}</span><span class="s2">=</span><span class="si">{}</span><span class="s2">&quot;</span>
<span class="k">for</span> <span class="n">key</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">_env_vars</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span>
<span class="s2">&quot;--conf&quot;</span><span class="p">,</span>
<span class="n">tmpl</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_env_vars</span><span class="p">[</span><span class="n">key</span><span class="p">]))]</span>
<span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">_env_vars</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;deploy_mode&#39;</span><span class="p">]</span> <span class="o">!=</span> <span class="s2">&quot;cluster&quot;</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_env</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_env_vars</span> <span class="c1"># Do it on Popen of the process</span>
<span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">_env_vars</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;deploy_mode&#39;</span><span class="p">]</span> <span class="o">==</span> <span class="s2">&quot;cluster&quot;</span><span class="p">:</span>
<span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span>
<span class="s2">&quot;SparkSubmitHook env_vars is not supported in standalone-cluster mode.&quot;</span><span class="p">)</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_is_kubernetes</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--conf&quot;</span><span class="p">,</span> <span class="s2">&quot;spark.kubernetes.namespace=</span><span class="si">{}</span><span class="s2">&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;namespace&#39;</span><span class="p">])]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_files</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--files&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_files</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_py_files</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--py-files&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_py_files</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_archives</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--archives&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_archives</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_class_path</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--driver-class-path&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_class_path</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_jars</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--jars&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_jars</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_packages</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--packages&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_packages</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_exclude_packages</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--exclude-packages&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_exclude_packages</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_repositories</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--repositories&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_repositories</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_num_executors</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--num-executors&quot;</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_num_executors</span><span class="p">)]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_total_executor_cores</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--total-executor-cores&quot;</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_total_executor_cores</span><span class="p">)]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_executor_cores</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--executor-cores&quot;</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_executor_cores</span><span class="p">)]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_executor_memory</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--executor-memory&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_executor_memory</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_memory</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--driver-memory&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_memory</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_keytab</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--keytab&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_keytab</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_principal</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--principal&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_principal</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_proxy_user</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--proxy-user&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_proxy_user</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_name</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--name&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_name</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_java_class</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--class&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_java_class</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_verbose</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--verbose&quot;</span><span class="p">]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;queue&#39;</span><span class="p">]:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--queue&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;queue&#39;</span><span class="p">]]</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;deploy_mode&#39;</span><span class="p">]:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--deploy-mode&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;deploy_mode&#39;</span><span class="p">]]</span>
<span class="c1"># The actual script to execute</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="n">application</span><span class="p">]</span>
<span class="c1"># Append any application arguments</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_application_args</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_application_args</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">&quot;Spark-Submit cmd: </span><span class="si">%s</span><span class="s2">&quot;</span><span class="p">,</span> <span class="n">connection_cmd</span><span class="p">)</span>
<span class="k">return</span> <span class="n">connection_cmd</span></div>
<div class="viewcode-block" id="SparkSubmitHook._build_track_driver_status_command"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook._build_track_driver_status_command">[docs]</a> <span class="k">def</span> <span class="nf">_build_track_driver_status_command</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Construct the command to poll the driver status.</span>
<span class="sd"> :return: full command to be executed</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">connection_cmd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_spark_binary_path</span><span class="p">()</span>
<span class="c1"># The url of the spark master</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--master&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;master&#39;</span><span class="p">]]</span>
<span class="c1"># The driver id so we can poll for its status</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--status&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span><span class="p">]</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span>
<span class="s2">&quot;Invalid status: attempted to poll driver &quot;</span> <span class="o">+</span>
<span class="s2">&quot;status but no driver id is known. Giving up.&quot;</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">debug</span><span class="p">(</span><span class="s2">&quot;Poll driver status cmd: </span><span class="si">%s</span><span class="s2">&quot;</span><span class="p">,</span> <span class="n">connection_cmd</span><span class="p">)</span>
<span class="k">return</span> <span class="n">connection_cmd</span></div>
<div class="viewcode-block" id="SparkSubmitHook.submit"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook.submit">[docs]</a> <span class="k">def</span> <span class="nf">submit</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">application</span><span class="o">=</span><span class="s2">&quot;&quot;</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Remote Popen to execute the spark-submit job</span>
<span class="sd"> :param application: Submitted application, jar or py file</span>
<span class="sd"> :type application: str</span>
<span class="sd"> :param kwargs: extra arguments to Popen (see subprocess.Popen)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">spark_submit_cmd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_build_spark_submit_command</span><span class="p">(</span><span class="n">application</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">hasattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="s1">&#39;_env&#39;</span><span class="p">):</span>
<span class="n">env</span> <span class="o">=</span> <span class="n">os</span><span class="o">.</span><span class="n">environ</span><span class="o">.</span><span class="n">copy</span><span class="p">()</span>
<span class="n">env</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_env</span><span class="p">)</span>
<span class="n">kwargs</span><span class="p">[</span><span class="s2">&quot;env&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">env</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_submit_sp</span> <span class="o">=</span> <span class="n">subprocess</span><span class="o">.</span><span class="n">Popen</span><span class="p">(</span><span class="n">spark_submit_cmd</span><span class="p">,</span>
<span class="n">stdout</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">PIPE</span><span class="p">,</span>
<span class="n">stderr</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">STDOUT</span><span class="p">,</span>
<span class="n">bufsize</span><span class="o">=-</span><span class="mi">1</span><span class="p">,</span>
<span class="n">universal_newlines</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span>
<span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_process_spark_submit_log</span><span class="p">(</span><span class="nb">iter</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_submit_sp</span><span class="o">.</span><span class="n">stdout</span><span class="o">.</span><span class="n">readline</span><span class="p">,</span> <span class="s1">&#39;&#39;</span><span class="p">))</span>
<span class="n">returncode</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_submit_sp</span><span class="o">.</span><span class="n">wait</span><span class="p">()</span>
<span class="c1"># Check spark-submit return code. In Kubernetes mode, also check the value</span>
<span class="c1"># of exit code in the log, as it may differ.</span>
<span class="k">if</span> <span class="n">returncode</span> <span class="ow">or</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_is_kubernetes</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">_spark_exit_code</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">):</span>
<span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span>
<span class="s2">&quot;Cannot execute: </span><span class="si">{}</span><span class="s2">. Error code is: </span><span class="si">{}</span><span class="s2">.&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span>
<span class="n">spark_submit_cmd</span><span class="p">,</span> <span class="n">returncode</span>
<span class="p">)</span>
<span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">debug</span><span class="p">(</span><span class="s2">&quot;Should track driver: </span><span class="si">{}</span><span class="s2">&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_should_track_driver_status</span><span class="p">))</span>
<span class="c1"># We want the Airflow job to wait until the Spark driver is finished</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_should_track_driver_status</span><span class="p">:</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span>
<span class="s2">&quot;No driver id is known: something went wrong when executing &quot;</span> <span class="o">+</span>
<span class="s2">&quot;the spark submit command&quot;</span>
<span class="p">)</span>
<span class="c1"># We start with the SUBMITTED status as initial status</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_driver_status</span> <span class="o">=</span> <span class="s2">&quot;SUBMITTED&quot;</span>
<span class="c1"># Start tracking the driver status (blocking function)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_start_driver_status_tracking</span><span class="p">()</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_status</span> <span class="o">!=</span> <span class="s2">&quot;FINISHED&quot;</span><span class="p">:</span>
<span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span>
<span class="s2">&quot;ERROR : Driver </span><span class="si">{}</span><span class="s2"> badly exited with status </span><span class="si">{}</span><span class="s2">&quot;</span>
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_status</span><span class="p">)</span></div>
<span class="p">)</span>
<div class="viewcode-block" id="SparkSubmitHook._process_spark_submit_log"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook._process_spark_submit_log">[docs]</a> <span class="k">def</span> <span class="nf">_process_spark_submit_log</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">itr</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Processes the log files and extracts useful information out of it.</span>
<span class="sd"> If the deploy-mode is &#39;client&#39;, log the output of the submit command as those</span>
<span class="sd"> are the output logs of the Spark worker directly.</span>
<span class="sd"> Remark: If the driver needs to be tracked for its status, the log-level of the</span>
<span class="sd"> spark deploy needs to be at least INFO (log4j.logger.org.apache.spark.deploy=INFO)</span>
<span class="sd"> :param itr: An iterator which iterates over the input of the subprocess</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># Consume the iterator</span>
<span class="k">for</span> <span class="n">line</span> <span class="ow">in</span> <span class="n">itr</span><span class="p">:</span>
<span class="n">line</span> <span class="o">=</span> <span class="n">line</span><span class="o">.</span><span class="n">strip</span><span class="p">()</span>
<span class="c1"># If we run yarn cluster mode, we want to extract the application id from</span>
<span class="c1"># the logs so we can kill the application when we stop it unexpectedly</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_is_yarn</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;deploy_mode&#39;</span><span class="p">]</span> <span class="o">==</span> <span class="s1">&#39;cluster&#39;</span><span class="p">:</span>
<span class="n">match</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">search</span><span class="p">(</span><span class="s1">&#39;(application[0-9_]+)&#39;</span><span class="p">,</span> <span class="n">line</span><span class="p">)</span>
<span class="k">if</span> <span class="n">match</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_yarn_application_id</span> <span class="o">=</span> <span class="n">match</span><span class="o">.</span><span class="n">groups</span><span class="p">()[</span><span class="mi">0</span><span class="p">]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">&quot;Identified spark driver id: </span><span class="si">%s</span><span class="s2">&quot;</span><span class="p">,</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_yarn_application_id</span><span class="p">)</span>
<span class="c1"># If we run Kubernetes cluster mode, we want to extract the driver pod id</span>
<span class="c1"># from the logs so we can kill the application when we stop it unexpectedly</span>
<span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">_is_kubernetes</span><span class="p">:</span>
<span class="n">match</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">search</span><span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\s*pod name: ((.+?)-([a-z0-9]+)-driver)&#39;</span><span class="p">,</span> <span class="n">line</span><span class="p">)</span>
<span class="k">if</span> <span class="n">match</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_kubernetes_driver_pod</span> <span class="o">=</span> <span class="n">match</span><span class="o">.</span><span class="n">groups</span><span class="p">()[</span><span class="mi">0</span><span class="p">]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">&quot;Identified spark driver pod: </span><span class="si">%s</span><span class="s2">&quot;</span><span class="p">,</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_kubernetes_driver_pod</span><span class="p">)</span>
<span class="c1"># Store the Spark Exit code</span>
<span class="n">match_exit_code</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">search</span><span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\s*Exit code: (\d+)&#39;</span><span class="p">,</span> <span class="n">line</span><span class="p">)</span>
<span class="k">if</span> <span class="n">match_exit_code</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_spark_exit_code</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">match_exit_code</span><span class="o">.</span><span class="n">groups</span><span class="p">()[</span><span class="mi">0</span><span class="p">])</span>
<span class="c1"># if we run in standalone cluster mode and we want to track the driver status</span>
<span class="c1"># we need to extract the driver id from the logs. This allows us to poll for</span>
<span class="c1"># the status using the driver id. Also, we can kill the driver when needed.</span>
<span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">_should_track_driver_status</span> <span class="ow">and</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span><span class="p">:</span>
<span class="n">match_driver_id</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">search</span><span class="p">(</span><span class="sa">r</span><span class="s1">&#39;(driver-[0-9\-]+)&#39;</span><span class="p">,</span> <span class="n">line</span><span class="p">)</span>
<span class="k">if</span> <span class="n">match_driver_id</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span> <span class="o">=</span> <span class="n">match_driver_id</span><span class="o">.</span><span class="n">groups</span><span class="p">()[</span><span class="mi">0</span><span class="p">]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">&quot;identified spark driver id: </span><span class="si">{}</span><span class="s2">&quot;</span>
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span><span class="p">))</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">line</span><span class="p">)</span></div>
<div class="viewcode-block" id="SparkSubmitHook._process_spark_status_log"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook._process_spark_status_log">[docs]</a> <span class="k">def</span> <span class="nf">_process_spark_status_log</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">itr</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> parses the logs of the spark driver status query process</span>
<span class="sd"> :param itr: An iterator which iterates over the input of the subprocess</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># Consume the iterator</span>
<span class="k">for</span> <span class="n">line</span> <span class="ow">in</span> <span class="n">itr</span><span class="p">:</span>
<span class="n">line</span> <span class="o">=</span> <span class="n">line</span><span class="o">.</span><span class="n">strip</span><span class="p">()</span>
<span class="c1"># Check if the log line is about the driver status and extract the status.</span>
<span class="k">if</span> <span class="s2">&quot;driverState&quot;</span> <span class="ow">in</span> <span class="n">line</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_driver_status</span> <span class="o">=</span> <span class="n">line</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">&#39; : &#39;</span><span class="p">)[</span><span class="mi">1</span><span class="p">]</span> \
<span class="o">.</span><span class="n">replace</span><span class="p">(</span><span class="s1">&#39;,&#39;</span><span class="p">,</span> <span class="s1">&#39;&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">replace</span><span class="p">(</span><span class="s1">&#39;</span><span class="se">\&quot;</span><span class="s1">&#39;</span><span class="p">,</span> <span class="s1">&#39;&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">strip</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">debug</span><span class="p">(</span><span class="s2">&quot;spark driver status log: </span><span class="si">{}</span><span class="s2">&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">line</span><span class="p">))</span></div>
<div class="viewcode-block" id="SparkSubmitHook._start_driver_status_tracking"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook._start_driver_status_tracking">[docs]</a> <span class="k">def</span> <span class="nf">_start_driver_status_tracking</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Polls the driver based on self._driver_id to get the status.</span>
<span class="sd"> Finish successfully when the status is FINISHED.</span>
<span class="sd"> Finish failed when the status is ERROR/UNKNOWN/KILLED/FAILED.</span>
<span class="sd"> Possible status:</span>
<span class="sd"> SUBMITTED</span>
<span class="sd"> Submitted but not yet scheduled on a worker</span>
<span class="sd"> RUNNING</span>
<span class="sd"> Has been allocated to a worker to run</span>
<span class="sd"> FINISHED</span>
<span class="sd"> Previously ran and exited cleanly</span>
<span class="sd"> RELAUNCHING</span>
<span class="sd"> Exited non-zero or due to worker failure, but has not yet</span>
<span class="sd"> started running again</span>
<span class="sd"> UNKNOWN</span>
<span class="sd"> The status of the driver is temporarily not known due to</span>
<span class="sd"> master failure recovery</span>
<span class="sd"> KILLED</span>
<span class="sd"> A user manually killed this driver</span>
<span class="sd"> FAILED</span>
<span class="sd"> The driver exited non-zero and was not supervised</span>
<span class="sd"> ERROR</span>
<span class="sd"> Unable to run or restart due to an unrecoverable error</span>
<span class="sd"> (e.g. missing jar file)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># When your Spark Standalone cluster is not performing well</span>
<span class="c1"># due to misconfiguration or heavy loads.</span>
<span class="c1"># it is possible that the polling request will timeout.</span>
<span class="c1"># Therefore we use a simple retry mechanism.</span>
<span class="n">missed_job_status_reports</span> <span class="o">=</span> <span class="mi">0</span>
<span class="n">max_missed_job_status_reports</span> <span class="o">=</span> <span class="mi">10</span>
<span class="c1"># Keep polling as long as the driver is processing</span>
<span class="k">while</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_status</span> <span class="ow">not</span> <span class="ow">in</span> <span class="p">[</span><span class="s2">&quot;FINISHED&quot;</span><span class="p">,</span> <span class="s2">&quot;UNKNOWN&quot;</span><span class="p">,</span>
<span class="s2">&quot;KILLED&quot;</span><span class="p">,</span> <span class="s2">&quot;FAILED&quot;</span><span class="p">,</span> <span class="s2">&quot;ERROR&quot;</span><span class="p">]:</span>
<span class="c1"># Sleep for 1 second as we do not want to spam the cluster</span>
<span class="n">time</span><span class="o">.</span><span class="n">sleep</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">debug</span><span class="p">(</span><span class="s2">&quot;polling status of spark driver with id </span><span class="si">{}</span><span class="s2">&quot;</span>
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span><span class="p">))</span>
<span class="n">poll_drive_status_cmd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_build_track_driver_status_command</span><span class="p">()</span>
<span class="n">status_process</span> <span class="o">=</span> <span class="n">subprocess</span><span class="o">.</span><span class="n">Popen</span><span class="p">(</span><span class="n">poll_drive_status_cmd</span><span class="p">,</span>
<span class="n">stdout</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">PIPE</span><span class="p">,</span>
<span class="n">stderr</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">STDOUT</span><span class="p">,</span>
<span class="n">bufsize</span><span class="o">=-</span><span class="mi">1</span><span class="p">,</span>
<span class="n">universal_newlines</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_process_spark_status_log</span><span class="p">(</span><span class="nb">iter</span><span class="p">(</span><span class="n">status_process</span><span class="o">.</span><span class="n">stdout</span><span class="o">.</span><span class="n">readline</span><span class="p">,</span> <span class="s1">&#39;&#39;</span><span class="p">))</span>
<span class="n">returncode</span> <span class="o">=</span> <span class="n">status_process</span><span class="o">.</span><span class="n">wait</span><span class="p">()</span>
<span class="k">if</span> <span class="n">returncode</span><span class="p">:</span>
<span class="k">if</span> <span class="n">missed_job_status_reports</span> <span class="o">&lt;</span> <span class="n">max_missed_job_status_reports</span><span class="p">:</span>
<span class="n">missed_job_status_reports</span> <span class="o">=</span> <span class="n">missed_job_status_reports</span> <span class="o">+</span> <span class="mi">1</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span>
<span class="s2">&quot;Failed to poll for the driver status </span><span class="si">{}</span><span class="s2"> times: returncode = </span><span class="si">{}</span><span class="s2">&quot;</span>
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">max_missed_job_status_reports</span><span class="p">,</span> <span class="n">returncode</span><span class="p">)</span></div>
<span class="p">)</span>
<div class="viewcode-block" id="SparkSubmitHook._build_spark_driver_kill_command"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook._build_spark_driver_kill_command">[docs]</a> <span class="k">def</span> <span class="nf">_build_spark_driver_kill_command</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Construct the spark-submit command to kill a driver.</span>
<span class="sd"> :return: full command to kill a driver</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># If the spark_home is passed then build the spark-submit executable path using</span>
<span class="c1"># the spark_home; otherwise assume that spark-submit is present in the path to</span>
<span class="c1"># the executing user</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;spark_home&#39;</span><span class="p">]:</span>
<span class="n">connection_cmd</span> <span class="o">=</span> <span class="p">[</span><span class="n">os</span><span class="o">.</span><span class="n">path</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;spark_home&#39;</span><span class="p">],</span>
<span class="s1">&#39;bin&#39;</span><span class="p">,</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;spark_binary&#39;</span><span class="p">])]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">connection_cmd</span> <span class="o">=</span> <span class="p">[</span><span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;spark_binary&#39;</span><span class="p">]]</span>
<span class="c1"># The url of the spark master</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--master&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;master&#39;</span><span class="p">]]</span>
<span class="c1"># The actual kill command</span>
<span class="n">connection_cmd</span> <span class="o">+=</span> <span class="p">[</span><span class="s2">&quot;--kill&quot;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span><span class="p">]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">debug</span><span class="p">(</span><span class="s2">&quot;Spark-Kill cmd: </span><span class="si">%s</span><span class="s2">&quot;</span><span class="p">,</span> <span class="n">connection_cmd</span><span class="p">)</span>
<span class="k">return</span> <span class="n">connection_cmd</span></div>
<div class="viewcode-block" id="SparkSubmitHook.on_kill"><a class="viewcode-back" href="../../../../_api/airflow/contrib/hooks/spark_submit_hook/index.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook.on_kill">[docs]</a> <span class="k">def</span> <span class="nf">on_kill</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">debug</span><span class="p">(</span><span class="s2">&quot;Kill Command is being called&quot;</span><span class="p">)</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_should_track_driver_status</span><span class="p">:</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;Killing driver </span><span class="si">{}</span><span class="s1"> on cluster&#39;</span>
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span><span class="p">))</span>
<span class="n">kill_cmd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_build_spark_driver_kill_command</span><span class="p">()</span>
<span class="n">driver_kill</span> <span class="o">=</span> <span class="n">subprocess</span><span class="o">.</span><span class="n">Popen</span><span class="p">(</span><span class="n">kill_cmd</span><span class="p">,</span>
<span class="n">stdout</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">PIPE</span><span class="p">,</span>
<span class="n">stderr</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">PIPE</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">&quot;Spark driver </span><span class="si">{}</span><span class="s2"> killed with return code: </span><span class="si">{}</span><span class="s2">&quot;</span>
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_driver_id</span><span class="p">,</span> <span class="n">driver_kill</span><span class="o">.</span><span class="n">wait</span><span class="p">()))</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_submit_sp</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">_submit_sp</span><span class="o">.</span><span class="n">poll</span><span class="p">()</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;Sending kill signal to </span><span class="si">%s</span><span class="s1">&#39;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;spark_binary&#39;</span><span class="p">])</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_submit_sp</span><span class="o">.</span><span class="n">kill</span><span class="p">()</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_yarn_application_id</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;Killing application </span><span class="si">{}</span><span class="s1"> on YARN&#39;</span>
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_yarn_application_id</span><span class="p">))</span>
<span class="n">kill_cmd</span> <span class="o">=</span> <span class="s2">&quot;yarn application -kill </span><span class="si">{}</span><span class="s2">&quot;</span> \
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_yarn_application_id</span><span class="p">)</span><span class="o">.</span><span class="n">split</span><span class="p">()</span>
<span class="n">yarn_kill</span> <span class="o">=</span> <span class="n">subprocess</span><span class="o">.</span><span class="n">Popen</span><span class="p">(</span><span class="n">kill_cmd</span><span class="p">,</span>
<span class="n">stdout</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">PIPE</span><span class="p">,</span>
<span class="n">stderr</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">PIPE</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">&quot;YARN killed with return code: </span><span class="si">%s</span><span class="s2">&quot;</span><span class="p">,</span> <span class="n">yarn_kill</span><span class="o">.</span><span class="n">wait</span><span class="p">())</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_kubernetes_driver_pod</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;Killing pod </span><span class="si">%s</span><span class="s1"> on Kubernetes&#39;</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_kubernetes_driver_pod</span><span class="p">)</span>
<span class="c1"># Currently only instantiate Kubernetes client for killing a spark pod.</span>
<span class="k">try</span><span class="p">:</span>
<span class="kn">import</span> <span class="nn">kubernetes</span>
<span class="n">client</span> <span class="o">=</span> <span class="n">kube_client</span><span class="o">.</span><span class="n">get_kube_client</span><span class="p">()</span>
<span class="n">api_response</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">delete_namespaced_pod</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_kubernetes_driver_pod</span><span class="p">,</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_connection</span><span class="p">[</span><span class="s1">&#39;namespace&#39;</span><span class="p">],</span>
<span class="n">body</span><span class="o">=</span><span class="n">kubernetes</span><span class="o">.</span><span class="n">client</span><span class="o">.</span><span class="n">V1DeleteOptions</span><span class="p">(),</span>
<span class="n">pretty</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">&quot;Spark on K8s killed with response: </span><span class="si">%s</span><span class="s2">&quot;</span><span class="p">,</span> <span class="n">api_response</span><span class="p">)</span>
<span class="k">except</span> <span class="n">kube_client</span><span class="o">.</span><span class="n">ApiException</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">&quot;Exception when attempting to kill Spark on K8s:&quot;</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">exception</span><span class="p">(</span><span class="n">e</span><span class="p">)</span></div></div>
</pre></div>
</div>
</div>
<footer>
<hr/>
<div role="contentinfo">
<p>
</p>
</div>
Built with <a href="https://www.sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/rtfd/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
<div class="footer">This page uses <a href="https://analytics.google.com/">
Google Analytics</a> to collect statistics. You can disable it by blocking
the JavaScript coming from www.google-analytics.com. Check our
<a href="../../../../privacy_notice.html">Privacy Policy</a>
for more details.
<script type="text/javascript">
(function() {
var ga = document.createElement('script');
ga.src = ('https:' == document.location.protocol ?
'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
ga.setAttribute('async', 'true');
var nodes = document.documentElement.childNodes;
var i = -1;
var node;
do {
i++;
node = nodes[i]
} while(node.nodeType !== Node.ELEMENT_NODE);
node.appendChild(ga);
})();
</script>
</div>
</footer>
</div>
</div>
</section>
</div>
<script type="text/javascript">
jQuery(function () {
SphinxRtdTheme.Navigation.enable(true);
});
</script>
</body>
</html>