<!-- blob: a823090509df728eb46a8b3477339bb2c625728a [file] [log] [blame] -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>pyspark.util &#8212; PySpark master documentation</title>
<link href="../../_static/styles/theme.css?digest=1999514e3f237ded88cf" rel="stylesheet">
<link href="../../_static/styles/pydata-sphinx-theme.css?digest=1999514e3f237ded88cf" rel="stylesheet">
<link rel="stylesheet"
href="../../_static/vendor/fontawesome/5.13.0/css/all.min.css">
<link rel="preload" as="font" type="font/woff2" crossorigin
href="../../_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.woff2">
<link rel="preload" as="font" type="font/woff2" crossorigin
href="../../_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.woff2">
<!-- NOTE(review): the undigested duplicate of pydata-sphinx-theme.css that was
     loaded here has been removed; the digested copy above already loads it. -->
<link rel="stylesheet" href="../../_static/pygments.css">
<link rel="stylesheet" href="../../_static/copybutton.css">
<link rel="stylesheet" href="../../_static/css/pyspark.css">
<link rel="preload" as="script" href="../../_static/scripts/pydata-sphinx-theme.js?digest=1999514e3f237ded88cf">
<script id="documentation_options" data-url_root="../../" src="../../_static/documentation_options.js"></script>
<script src="../../_static/jquery.js"></script>
<script src="../../_static/underscore.js"></script>
<script src="../../_static/doctools.js"></script>
<script src="../../_static/language_data.js"></script>
<script src="../../_static/clipboard.min.js"></script>
<script src="../../_static/copybutton.js"></script>
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/x-mathjax-config">MathJax.Hub.Config({"tex2jax": {"inlineMath": [["$", "$"], ["\\(", "\\)"]], "processEscapes": true, "ignoreClass": "tex2jax_ignore|mathjax_ignore|document", "processClass": "tex2jax_process|mathjax_process|math|output_area"}})</script>
<link rel="canonical" href="https://spark.apache.org/docs/latest/api/python/_modules/pyspark/util.html">
<link rel="search" title="Search" href="../../search.html">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- NOTE(review): content="None" looks like a Python None leaking from the docs
     build config — presumably this should be a language code; confirm upstream. -->
<meta name="docsearch:language" content="None">
<!-- Google Analytics -->
</head>
<body data-spy="scroll" data-target="#bd-toc-nav" data-offset="80">
<div class="container-fluid" id="banner"></div>
<nav class="navbar navbar-light navbar-expand-lg bg-light fixed-top bd-navbar" id="navbar-main"><div class="container-xl">
<div id="navbar-start">
<a class="navbar-brand" href="../../index.html">
<img src="../../_static/spark-logo-reverse.png" class="logo" alt="logo">
</a>
</div>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbar-collapsible" aria-controls="navbar-collapsible" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div id="navbar-collapsible" class="col-lg-9 collapse navbar-collapse">
<div id="navbar-center" class="mr-auto">
<div class="navbar-center-item">
<ul id="navbar-main-elements" class="navbar-nav">
<li class="toctree-l1 nav-item">
<a class="reference internal nav-link" href="../../index.html">
Overview
</a>
</li>
<li class="toctree-l1 nav-item">
<a class="reference internal nav-link" href="../../getting_started/index.html">
Getting Started
</a>
</li>
<li class="toctree-l1 nav-item">
<a class="reference internal nav-link" href="../../user_guide/index.html">
User Guides
</a>
</li>
<li class="toctree-l1 nav-item">
<a class="reference internal nav-link" href="../../reference/index.html">
API Reference
</a>
</li>
<li class="toctree-l1 nav-item">
<a class="reference internal nav-link" href="../../development/index.html">
Development
</a>
</li>
<li class="toctree-l1 nav-item">
<a class="reference internal nav-link" href="../../migration_guide/index.html">
Migration Guides
</a>
</li>
</ul>
</div>
</div>
<div id="navbar-end">
<div class="navbar-end-item">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<div id="version-button" class="dropdown">
<button type="button" class="btn btn-secondary btn-sm navbar-btn dropdown-toggle" id="version_switcher_button" data-toggle="dropdown">
3.5.1
<span class="caret"></span>
</button>
<div id="version_switcher" class="dropdown-menu list-group-flush py-0" aria-labelledby="version_switcher_button">
<!-- dropdown will be populated by javascript on page load -->
</div>
</div>
<script type="text/javascript">
// Function to construct the target URL from the JSON components
// Build the docs homepage URL for one entry of versions.json by filling the
// {version} placeholder in the URL pattern (pattern supplied by jinja).
function buildURL(entry) {
    const pattern = "https://spark.apache.org/docs/{version}/api/python/index.html";
    return pattern.replace("{version}", entry.version);
}
// Click handler for version-switcher links: check (via a HEAD request) whether
// the page currently being viewed also exists in the selected docs version.
// If it does, navigate straight to it; otherwise fall back to that version's
// homepage (the link's href).
function checkPageExistsAndRedirect(event) {
    const thisPagePath = "_modules/pyspark/util.html";
    const versionHomepage = event.target.getAttribute("href");
    const candidateUrl = `${versionHomepage}${thisPagePath}`;
    $.ajax({
        type: 'HEAD',
        url: candidateUrl,
        success: function() {
            // Same page exists in the other version: go directly there.
            location.href = candidateUrl;
        }
    }).fail(function() {
        // No matching page: fall back to the other version's homepage.
        location.href = versionHomepage;
    });
    // Cancel the default link navigation; the AJAX callbacks decide the target.
    return false;
}
// Populate the version-switcher dropdown from the published versions list.
(function () {
    // Fetch the JSON config of all published doc versions.
    $.getJSON("https://spark.apache.org/static/versions.json", function(data, textStatus, jqXHR) {
        // Build every entry synchronously (before any per-link AJAX fires) so
        // the dropdown order matches the JSON order; each link initially
        // targets the version's homepage.
        $.each(data, function(index, entry) {
            // When no custom display name is given (e.g. "latest"), fall back
            // to the raw version string.
            if (!("name" in entry)) {
                entry.name = entry.version;
            }
            // Resolve the homepage URL and append the link to the dropdown.
            entry.url = buildURL(entry);
            const item = document.createElement("a");
            item.setAttribute("class", "list-group-item list-group-item-action py-1");
            item.setAttribute("href", `${entry.url}`);
            item.textContent = `${entry.name}`;
            item.onclick = checkPageExistsAndRedirect;
            $("#version_switcher").append(item);
        });
    });
})();
</script>
</div>
</div>
</div>
</div>
</nav>
<div class="container-xl">
<div class="row">
<!-- Only show if we have sidebars configured, else just a small margin -->
<div class="col-12 col-md-3 bd-sidebar">
<div class="sidebar-start-items"><form class="bd-search d-flex align-items-center" action="../../search.html" method="get">
<i class="icon fas fa-search"></i>
<input type="search" class="form-control" name="q" id="search-input" placeholder="Search the docs ..." aria-label="Search the docs ..." autocomplete="off" >
</form><nav class="bd-links" id="bd-docs-nav" aria-label="Main navigation">
<div class="bd-toc-item active">
</div>
</nav>
</div>
<div class="sidebar-end-items">
</div>
</div>
<div class="d-none d-xl-block col-xl-2 bd-toc">
</div>
<main class="col-12 col-md-9 col-xl-7 py-md-5 pl-md-5 pr-md-4 bd-content" role="main">
<div>
<h1>Source code for pyspark.util</h1><div class="highlight"><pre>
<span></span><span class="c1"># -*- coding: utf-8 -*-</span>
<span class="c1">#</span>
<span class="c1"># Licensed to the Apache Software Foundation (ASF) under one or more</span>
<span class="c1"># contributor license agreements. See the NOTICE file distributed with</span>
<span class="c1"># this work for additional information regarding copyright ownership.</span>
<span class="c1"># The ASF licenses this file to You under the Apache License, Version 2.0</span>
<span class="c1"># (the &quot;License&quot;); you may not use this file except in compliance with</span>
<span class="c1"># the License. You may obtain a copy of the License at</span>
<span class="c1">#</span>
<span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="c1">#</span>
<span class="c1"># Unless required by applicable law or agreed to in writing, software</span>
<span class="c1"># distributed under the License is distributed on an &quot;AS IS&quot; BASIS,</span>
<span class="c1"># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span>
<span class="c1"># See the License for the specific language governing permissions and</span>
<span class="c1"># limitations under the License.</span>
<span class="c1">#</span>
<span class="kn">import</span> <span class="nn">functools</span>
<span class="kn">import</span> <span class="nn">itertools</span>
<span class="kn">import</span> <span class="nn">os</span>
<span class="kn">import</span> <span class="nn">platform</span>
<span class="kn">import</span> <span class="nn">re</span>
<span class="kn">import</span> <span class="nn">sys</span>
<span class="kn">import</span> <span class="nn">threading</span>
<span class="kn">import</span> <span class="nn">traceback</span>
<span class="kn">from</span> <span class="nn">types</span> <span class="kn">import</span> <span class="n">TracebackType</span>
<span class="kn">from</span> <span class="nn">typing</span> <span class="kn">import</span> <span class="n">Any</span><span class="p">,</span> <span class="n">Callable</span><span class="p">,</span> <span class="n">Iterator</span><span class="p">,</span> <span class="n">List</span><span class="p">,</span> <span class="n">Optional</span><span class="p">,</span> <span class="n">TextIO</span><span class="p">,</span> <span class="n">Tuple</span>
<span class="kn">from</span> <span class="nn">pyspark.errors</span> <span class="kn">import</span> <span class="n">PySparkRuntimeError</span>
<span class="kn">from</span> <span class="nn">py4j.clientserver</span> <span class="kn">import</span> <span class="n">ClientServer</span>
<span class="n">__all__</span><span class="p">:</span> <span class="n">List</span><span class="p">[</span><span class="nb">str</span><span class="p">]</span> <span class="o">=</span> <span class="p">[]</span>
<span class="kn">from</span> <span class="nn">py4j.java_gateway</span> <span class="kn">import</span> <span class="n">JavaObject</span>
<span class="k">def</span> <span class="nf">print_exec</span><span class="p">(</span><span class="n">stream</span><span class="p">:</span> <span class="n">TextIO</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">ei</span> <span class="o">=</span> <span class="n">sys</span><span class="o">.</span><span class="n">exc_info</span><span class="p">()</span>
<span class="n">traceback</span><span class="o">.</span><span class="n">print_exception</span><span class="p">(</span><span class="n">ei</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">ei</span><span class="p">[</span><span class="mi">1</span><span class="p">],</span> <span class="n">ei</span><span class="p">[</span><span class="mi">2</span><span class="p">],</span> <span class="kc">None</span><span class="p">,</span> <span class="n">stream</span><span class="p">)</span>
<div class="viewcode-block" id="VersionUtils"><a class="viewcode-back" href="../../reference/api/pyspark.util.VersionUtils.html#pyspark.VersionUtils">[docs]</a><span class="k">class</span> <span class="nc">VersionUtils</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Provides utility method to determine Spark versions with given input string.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<div class="viewcode-block" id="VersionUtils.majorMinorVersion"><a class="viewcode-back" href="../../reference/api/pyspark.util.VersionUtils.majorMinorVersion.html#pyspark.VersionUtils.majorMinorVersion">[docs]</a> <span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">majorMinorVersion</span><span class="p">(</span><span class="n">sparkVersion</span><span class="p">:</span> <span class="nb">str</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Tuple</span><span class="p">[</span><span class="nb">int</span><span class="p">,</span> <span class="nb">int</span><span class="p">]:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Given a Spark version string, return the (major version number, minor version number).</span>
<span class="sd"> E.g., for 2.0.1-SNAPSHOT, return (2, 0).</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; sparkVersion = &quot;2.4.0&quot;</span>
<span class="sd"> &gt;&gt;&gt; VersionUtils.majorMinorVersion(sparkVersion)</span>
<span class="sd"> (2, 4)</span>
<span class="sd"> &gt;&gt;&gt; sparkVersion = &quot;2.3.0-SNAPSHOT&quot;</span>
<span class="sd"> &gt;&gt;&gt; VersionUtils.majorMinorVersion(sparkVersion)</span>
<span class="sd"> (2, 3)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">m</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">search</span><span class="p">(</span><span class="sa">r</span><span class="s2">&quot;^(\d+)\.(\d+)(\..*)?$&quot;</span><span class="p">,</span> <span class="n">sparkVersion</span><span class="p">)</span>
<span class="k">if</span> <span class="n">m</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">return</span> <span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">m</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">1</span><span class="p">)),</span> <span class="nb">int</span><span class="p">(</span><span class="n">m</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">2</span><span class="p">)))</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span>
<span class="s2">&quot;Spark tried to parse &#39;</span><span class="si">%s</span><span class="s2">&#39; as a Spark&quot;</span> <span class="o">%</span> <span class="n">sparkVersion</span>
<span class="o">+</span> <span class="s2">&quot; version string, but it could not find the major and minor&quot;</span>
<span class="o">+</span> <span class="s2">&quot; version numbers.&quot;</span>
<span class="p">)</span></div></div>
<span class="k">def</span> <span class="nf">fail_on_stopiteration</span><span class="p">(</span><span class="n">f</span><span class="p">:</span> <span class="n">Callable</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Callable</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Wraps the input function to fail on &#39;StopIteration&#39; by raising a &#39;RuntimeError&#39;</span>
<span class="sd"> prevents silent loss of data when &#39;f&#39; is used in a for loop in Spark code</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">def</span> <span class="nf">wrapper</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">:</span> <span class="n">Any</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Any</span><span class="p">:</span>
<span class="k">try</span><span class="p">:</span>
<span class="k">return</span> <span class="n">f</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
<span class="k">except</span> <span class="ne">StopIteration</span> <span class="k">as</span> <span class="n">exc</span><span class="p">:</span>
<span class="k">raise</span> <span class="n">PySparkRuntimeError</span><span class="p">(</span>
<span class="n">error_class</span><span class="o">=</span><span class="s2">&quot;STOP_ITERATION_OCCURRED&quot;</span><span class="p">,</span>
<span class="n">message_parameters</span><span class="o">=</span><span class="p">{</span>
<span class="s2">&quot;exc&quot;</span><span class="p">:</span> <span class="nb">str</span><span class="p">(</span><span class="n">exc</span><span class="p">),</span>
<span class="p">},</span>
<span class="p">)</span>
<span class="k">return</span> <span class="n">wrapper</span>
<span class="k">def</span> <span class="nf">walk_tb</span><span class="p">(</span><span class="n">tb</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">TracebackType</span><span class="p">])</span> <span class="o">-&gt;</span> <span class="n">Iterator</span><span class="p">[</span><span class="n">TracebackType</span><span class="p">]:</span>
<span class="k">while</span> <span class="n">tb</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">yield</span> <span class="n">tb</span>
<span class="n">tb</span> <span class="o">=</span> <span class="n">tb</span><span class="o">.</span><span class="n">tb_next</span>
<span class="k">def</span> <span class="nf">try_simplify_traceback</span><span class="p">(</span><span class="n">tb</span><span class="p">:</span> <span class="n">TracebackType</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Optional</span><span class="p">[</span><span class="n">TracebackType</span><span class="p">]:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Simplify the traceback. It removes the tracebacks in the current package, and only</span>
<span class="sd"> shows the traceback that is related to the thirdparty and user-specified codes.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> TracebackType or None</span>
<span class="sd"> Simplified traceback instance. It returns None if it fails to simplify.</span>
<span class="sd"> Notes</span>
<span class="sd"> -----</span>
<span class="sd"> This keeps the tracebacks once it sees they are from a different file even</span>
<span class="sd"> though the following tracebacks are from the current package.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; import importlib</span>
<span class="sd"> &gt;&gt;&gt; import sys</span>
<span class="sd"> &gt;&gt;&gt; import traceback</span>
<span class="sd"> &gt;&gt;&gt; import tempfile</span>
<span class="sd"> &gt;&gt;&gt; with tempfile.TemporaryDirectory() as tmp_dir:</span>
<span class="sd"> ... with open(&quot;%s/dummy_module.py&quot; % tmp_dir, &quot;w&quot;) as f:</span>
<span class="sd"> ... _ = f.write(</span>
<span class="sd"> ... &#39;def raise_stop_iteration():\\n&#39;</span>
<span class="sd"> ... &#39; raise StopIteration()\\n\\n&#39;</span>
<span class="sd"> ... &#39;def simple_wrapper(f):\\n&#39;</span>
<span class="sd"> ... &#39; def wrapper(*a, **k):\\n&#39;</span>
<span class="sd"> ... &#39; return f(*a, **k)\\n&#39;</span>
<span class="sd"> ... &#39; return wrapper\\n&#39;)</span>
<span class="sd"> ... f.flush()</span>
<span class="sd"> ... spec = importlib.util.spec_from_file_location(</span>
<span class="sd"> ... &quot;dummy_module&quot;, &quot;%s/dummy_module.py&quot; % tmp_dir)</span>
<span class="sd"> ... dummy_module = importlib.util.module_from_spec(spec)</span>
<span class="sd"> ... spec.loader.exec_module(dummy_module)</span>
<span class="sd"> &gt;&gt;&gt; def skip_doctest_traceback(tb):</span>
<span class="sd"> ... import pyspark</span>
<span class="sd"> ... root = os.path.dirname(pyspark.__file__)</span>
<span class="sd"> ... pairs = zip(walk_tb(tb), traceback.extract_tb(tb))</span>
<span class="sd"> ... for cur_tb, cur_frame in pairs:</span>
<span class="sd"> ... if cur_frame.filename.startswith(root):</span>
<span class="sd"> ... return cur_tb</span>
<span class="sd"> Regular exceptions should show the file name of the current package as below.</span>
<span class="sd"> &gt;&gt;&gt; exc_info = None</span>
<span class="sd"> &gt;&gt;&gt; try:</span>
<span class="sd"> ... fail_on_stopiteration(dummy_module.raise_stop_iteration)()</span>
<span class="sd"> ... except Exception as e:</span>
<span class="sd"> ... tb = sys.exc_info()[-1]</span>
<span class="sd"> ... e.__cause__ = None</span>
<span class="sd"> ... exc_info = &quot;&quot;.join(</span>
<span class="sd"> ... traceback.format_exception(type(e), e, tb))</span>
<span class="sd"> &gt;&gt;&gt; print(exc_info) # doctest: +NORMALIZE_WHITESPACE, +ELLIPSIS</span>
<span class="sd"> Traceback (most recent call last):</span>
<span class="sd"> File ...</span>
<span class="sd"> ...</span>
<span class="sd"> File &quot;/.../pyspark/util.py&quot;, line ...</span>
<span class="sd"> ...</span>
<span class="sd"> pyspark.errors.exceptions.base.PySparkRuntimeError: ...</span>
<span class="sd"> &gt;&gt;&gt; &quot;pyspark/util.py&quot; in exc_info</span>
<span class="sd"> True</span>
<span class="sd"> If the traceback is simplified with this method, it hides the current package file name:</span>
<span class="sd"> &gt;&gt;&gt; exc_info = None</span>
<span class="sd"> &gt;&gt;&gt; try:</span>
<span class="sd"> ... fail_on_stopiteration(dummy_module.raise_stop_iteration)()</span>
<span class="sd"> ... except Exception as e:</span>
<span class="sd"> ... tb = try_simplify_traceback(sys.exc_info()[-1])</span>
<span class="sd"> ... e.__cause__ = None</span>
<span class="sd"> ... exc_info = &quot;&quot;.join(</span>
<span class="sd"> ... traceback.format_exception(</span>
<span class="sd"> ... type(e), e, try_simplify_traceback(skip_doctest_traceback(tb))))</span>
<span class="sd"> &gt;&gt;&gt; print(exc_info) # doctest: +NORMALIZE_WHITESPACE, +ELLIPSIS</span>
<span class="sd"> pyspark.errors.exceptions.base.PySparkRuntimeError: ...</span>
<span class="sd"> &gt;&gt;&gt; &quot;pyspark/util.py&quot; in exc_info</span>
<span class="sd"> False</span>
<span class="sd"> In the case below, the traceback contains the current package in the middle.</span>
<span class="sd"> In this case, it just hides the top occurrence only.</span>
<span class="sd"> &gt;&gt;&gt; exc_info = None</span>
<span class="sd"> &gt;&gt;&gt; try:</span>
<span class="sd"> ... fail_on_stopiteration(dummy_module.simple_wrapper(</span>
<span class="sd"> ... fail_on_stopiteration(dummy_module.raise_stop_iteration)))()</span>
<span class="sd"> ... except Exception as e:</span>
<span class="sd"> ... tb = sys.exc_info()[-1]</span>
<span class="sd"> ... e.__cause__ = None</span>
<span class="sd"> ... exc_info_a = &quot;&quot;.join(</span>
<span class="sd"> ... traceback.format_exception(type(e), e, tb))</span>
<span class="sd"> ... exc_info_b = &quot;&quot;.join(</span>
<span class="sd"> ... traceback.format_exception(</span>
<span class="sd"> ... type(e), e, try_simplify_traceback(skip_doctest_traceback(tb))))</span>
<span class="sd"> &gt;&gt;&gt; exc_info_a.count(&quot;pyspark/util.py&quot;)</span>
<span class="sd"> 2</span>
<span class="sd"> &gt;&gt;&gt; exc_info_b.count(&quot;pyspark/util.py&quot;)</span>
<span class="sd"> 1</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="s2">&quot;pypy&quot;</span> <span class="ow">in</span> <span class="n">platform</span><span class="o">.</span><span class="n">python_implementation</span><span class="p">()</span><span class="o">.</span><span class="n">lower</span><span class="p">():</span>
<span class="c1"># Traceback modification is not supported with PyPy in PySpark.</span>
<span class="k">return</span> <span class="kc">None</span>
<span class="k">if</span> <span class="n">sys</span><span class="o">.</span><span class="n">version_info</span><span class="p">[:</span><span class="mi">2</span><span class="p">]</span> <span class="o">&lt;</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">7</span><span class="p">):</span>
<span class="c1"># Traceback creation is not supported Python &lt; 3.7.</span>
<span class="c1"># See https://bugs.python.org/issue30579.</span>
<span class="k">return</span> <span class="kc">None</span>
<span class="kn">import</span> <span class="nn">pyspark</span>
<span class="n">root</span> <span class="o">=</span> <span class="n">os</span><span class="o">.</span><span class="n">path</span><span class="o">.</span><span class="n">dirname</span><span class="p">(</span><span class="n">pyspark</span><span class="o">.</span><span class="vm">__file__</span><span class="p">)</span>
<span class="n">tb_next</span> <span class="o">=</span> <span class="kc">None</span>
<span class="n">new_tb</span> <span class="o">=</span> <span class="kc">None</span>
<span class="n">pairs</span> <span class="o">=</span> <span class="nb">zip</span><span class="p">(</span><span class="n">walk_tb</span><span class="p">(</span><span class="n">tb</span><span class="p">),</span> <span class="n">traceback</span><span class="o">.</span><span class="n">extract_tb</span><span class="p">(</span><span class="n">tb</span><span class="p">))</span>
<span class="n">last_seen</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">cur_tb</span><span class="p">,</span> <span class="n">cur_frame</span> <span class="ow">in</span> <span class="n">pairs</span><span class="p">:</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">cur_frame</span><span class="o">.</span><span class="n">filename</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="n">root</span><span class="p">):</span>
<span class="c1"># Filter the stacktrace from the PySpark source itself.</span>
<span class="n">last_seen</span> <span class="o">=</span> <span class="p">[(</span><span class="n">cur_tb</span><span class="p">,</span> <span class="n">cur_frame</span><span class="p">)]</span>
<span class="k">break</span>
<span class="k">for</span> <span class="n">cur_tb</span><span class="p">,</span> <span class="n">cur_frame</span> <span class="ow">in</span> <span class="nb">reversed</span><span class="p">(</span><span class="nb">list</span><span class="p">(</span><span class="n">itertools</span><span class="o">.</span><span class="n">chain</span><span class="p">(</span><span class="n">last_seen</span><span class="p">,</span> <span class="n">pairs</span><span class="p">))):</span>
<span class="c1"># Once we have seen the file names outside, don&#39;t skip.</span>
<span class="n">new_tb</span> <span class="o">=</span> <span class="n">TracebackType</span><span class="p">(</span>
<span class="n">tb_next</span><span class="o">=</span><span class="n">tb_next</span><span class="p">,</span>
<span class="n">tb_frame</span><span class="o">=</span><span class="n">cur_tb</span><span class="o">.</span><span class="n">tb_frame</span><span class="p">,</span>
<span class="n">tb_lasti</span><span class="o">=</span><span class="n">cur_tb</span><span class="o">.</span><span class="n">tb_frame</span><span class="o">.</span><span class="n">f_lasti</span><span class="p">,</span>
<span class="n">tb_lineno</span><span class="o">=</span><span class="n">cur_tb</span><span class="o">.</span><span class="n">tb_frame</span><span class="o">.</span><span class="n">f_lineno</span> <span class="k">if</span> <span class="n">cur_tb</span><span class="o">.</span><span class="n">tb_frame</span><span class="o">.</span><span class="n">f_lineno</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span> <span class="k">else</span> <span class="o">-</span><span class="mi">1</span><span class="p">,</span>
<span class="p">)</span>
<span class="n">tb_next</span> <span class="o">=</span> <span class="n">new_tb</span>
<span class="k">return</span> <span class="n">new_tb</span>
<span class="k">def</span> <span class="nf">_print_missing_jar</span><span class="p">(</span><span class="n">lib_name</span><span class="p">:</span> <span class="nb">str</span><span class="p">,</span> <span class="n">pkg_name</span><span class="p">:</span> <span class="nb">str</span><span class="p">,</span> <span class="n">jar_name</span><span class="p">:</span> <span class="nb">str</span><span class="p">,</span> <span class="n">spark_version</span><span class="p">:</span> <span class="nb">str</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="kc">None</span><span class="p">:</span>
<span class="nb">print</span><span class="p">(</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">________________________________________________________________________________________________</span>
<span class="sd"> Spark %(lib_name)s libraries not found in class path. Try one of the following.</span>
<span class="sd"> 1. Include the %(lib_name)s library and its dependencies with in the</span>
<span class="sd"> spark-submit command as</span>
<span class="sd"> $ bin/spark-submit --packages org.apache.spark:spark-%(pkg_name)s:%(spark_version)s ...</span>
<span class="sd"> 2. Download the JAR of the artifact from Maven Central http://search.maven.org/,</span>
<span class="sd"> Group Id = org.apache.spark, Artifact Id = spark-%(jar_name)s, Version = %(spark_version)s.</span>
<span class="sd"> Then, include the jar in the spark-submit command as</span>
<span class="sd"> $ bin/spark-submit --jars &lt;spark-%(jar_name)s.jar&gt; ...</span>
<span class="sd">________________________________________________________________________________________________</span>
<span class="sd">&quot;&quot;&quot;</span>
<span class="o">%</span> <span class="p">{</span>
<span class="s2">&quot;lib_name&quot;</span><span class="p">:</span> <span class="n">lib_name</span><span class="p">,</span>
<span class="s2">&quot;pkg_name&quot;</span><span class="p">:</span> <span class="n">pkg_name</span><span class="p">,</span>
<span class="s2">&quot;jar_name&quot;</span><span class="p">:</span> <span class="n">jar_name</span><span class="p">,</span>
<span class="s2">&quot;spark_version&quot;</span><span class="p">:</span> <span class="n">spark_version</span><span class="p">,</span>
<span class="p">}</span>
<span class="p">)</span>
<span class="k">def</span> <span class="nf">_parse_memory</span><span class="p">(</span><span class="n">s</span><span class="p">:</span> <span class="nb">str</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="nb">int</span><span class="p">:</span><!-- Pygments-highlighted source of pyspark.util._parse_memory: converts a JVM-style size string ("256m", "2g") to MiB, per the docstring below -->
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Parse a memory string in the format supported by Java (e.g. 1g, 200m) and</span>
<span class="sd"> return the value in MiB</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; _parse_memory(&quot;256m&quot;)</span>
<span class="sd"> 256</span>
<span class="sd"> &gt;&gt;&gt; _parse_memory(&quot;2g&quot;)</span>
<span class="sd"> 2048</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">units</span> <span class="o">=</span> <span class="p">{</span><span class="s2">&quot;g&quot;</span><span class="p">:</span> <span class="mi">1024</span><span class="p">,</span> <span class="s2">&quot;m&quot;</span><span class="p">:</span> <span class="mi">1</span><span class="p">,</span> <span class="s2">&quot;t&quot;</span><span class="p">:</span> <span class="mi">1</span> <span class="o">&lt;&lt;</span> <span class="mi">20</span><span class="p">,</span> <span class="s2">&quot;k&quot;</span><span class="p">:</span> <span class="mf">1.0</span> <span class="o">/</span> <span class="mi">1024</span><span class="p">}</span><!-- multipliers relative to MiB: g=1024, m=1, t=2**20, k=1/1024 -->
<span class="k">if</span> <span class="n">s</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">]</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">units</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;invalid format: &quot;</span> <span class="o">+</span> <span class="n">s</span><span class="p">)</span>
<span class="k">return</span> <span class="nb">int</span><span class="p">(</span><span class="nb">float</span><span class="p">(</span><span class="n">s</span><span class="p">[:</span><span class="o">-</span><span class="mi">1</span><span class="p">])</span> <span class="o">*</span> <span class="n">units</span><span class="p">[</span><span class="n">s</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">]</span><span class="o">.</span><span class="n">lower</span><span class="p">()])</span>
<div class="viewcode-block" id="inheritable_thread_target"><a class="viewcode-back" href="../../reference/api/pyspark.inheritable_thread_target.html#pyspark.inheritable_thread_target">[docs]</a><span class="k">def</span> <span class="nf">inheritable_thread_target</span><span class="p">(</span><span class="n">f</span><span class="p">:</span> <span class="n">Callable</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Callable</span><span class="p">:</span><!-- viewcode anchor + highlighted source for pyspark.inheritable_thread_target; the [docs] link points back to the API reference page -->
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Return thread target wrapper which is recommended to be used in PySpark when the</span>
<span class="sd"> pinned thread mode is enabled. The wrapper function, before calling original</span>
<span class="sd"> thread target, it inherits the inheritable properties specific</span>
<span class="sd"> to JVM thread such as ``InheritableThreadLocal``.</span>
<span class="sd"> Also, note that pinned thread mode does not close the connection from Python</span>
<span class="sd"> to JVM when the thread is finished in the Python side. With this wrapper, Python</span>
<span class="sd"> garbage-collects the Python thread instance and also closes the connection</span>
<span class="sd"> which finishes JVM thread correctly.</span>
<span class="sd"> When the pinned thread mode is off, it return the original ``f``.</span>
<span class="sd"> .. versionadded:: 3.2.0</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> f : function</span>
<span class="sd"> the original thread target.</span>
<span class="sd"> Notes</span>
<span class="sd"> -----</span>
<span class="sd"> This API is experimental.</span>
<span class="sd"> It is important to know that it captures the local properties when you decorate it</span>
<span class="sd"> whereas :class:`InheritableThread` captures when the thread is started.</span>
<span class="sd"> Therefore, it is encouraged to decorate it when you want to capture the local</span>
<span class="sd"> properties.</span>
<span class="sd"> For example, the local properties from the current Spark context is captured</span>
<span class="sd"> when you define a function here instead of the invocation:</span>
<span class="sd"> &gt;&gt;&gt; @inheritable_thread_target</span>
<span class="sd"> ... def target_func():</span>
<span class="sd"> ... pass # your codes.</span>
<span class="sd"> If you have any updates on local properties afterwards, it would not be reflected to</span>
<span class="sd"> the Spark context in ``target_func()``.</span>
<span class="sd"> The example below mimics the behavior of JVM threads as close as possible:</span>
<span class="sd"> &gt;&gt;&gt; Thread(target=inheritable_thread_target(target_func)).start() # doctest: +SKIP</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="kn">from</span> <span class="nn">pyspark</span> <span class="kn">import</span> <span class="n">SparkContext</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">SparkContext</span><span class="o">.</span><span class="n">_gateway</span><span class="p">,</span> <span class="n">ClientServer</span><span class="p">):</span><!-- pinned-thread branch: snapshots JVM local properties at wrap time (contrast with InheritableThread below, which snapshots at start()) -->
<span class="c1"># Here&#39;s when the pinned-thread mode (PYSPARK_PIN_THREAD) is on.</span>
<span class="c1"># NOTICE the internal difference vs `InheritableThread`. `InheritableThread`</span>
<span class="c1"># copies local properties when the thread starts but `inheritable_thread_target`</span>
<span class="c1"># copies when the function is wrapped.</span>
<span class="k">assert</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span>
<span class="n">properties</span> <span class="o">=</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span><span class="o">.</span><span class="n">_jsc</span><span class="o">.</span><span class="n">sc</span><span class="p">()</span><span class="o">.</span><span class="n">getLocalProperties</span><span class="p">()</span><span class="o">.</span><span class="n">clone</span><span class="p">()</span>
<span class="nd">@functools</span><span class="o">.</span><span class="n">wraps</span><span class="p">(</span><span class="n">f</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">wrapped</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">:</span> <span class="n">Any</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Any</span><span class="p">:</span>
<span class="c1"># Set local properties in child thread.</span>
<span class="k">assert</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span>
<span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span><span class="o">.</span><span class="n">_jsc</span><span class="o">.</span><span class="n">sc</span><span class="p">()</span><span class="o">.</span><span class="n">setLocalProperties</span><span class="p">(</span><span class="n">properties</span><span class="p">)</span>
<span class="k">return</span> <span class="n">f</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
<span class="k">return</span> <span class="n">wrapped</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="n">f</span></div>
<div class="viewcode-block" id="InheritableThread"><a class="viewcode-back" href="../../reference/api/pyspark.InheritableThread.html#pyspark.InheritableThread">[docs]</a><span class="k">class</span> <span class="nc">InheritableThread</span><span class="p">(</span><span class="n">threading</span><span class="o">.</span><span class="n">Thread</span><span class="p">):</span><!-- viewcode anchor + highlighted source for pyspark.InheritableThread, a threading.Thread subclass -->
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Thread that is recommended to be used in PySpark instead of :class:`threading.Thread`</span>
<span class="sd"> when the pinned thread mode is enabled. The usage of this class is exactly same as</span>
<span class="sd"> :class:`threading.Thread` but correctly inherits the inheritable properties specific</span>
<span class="sd"> to JVM thread such as ``InheritableThreadLocal``.</span>
<span class="sd"> Also, note that pinned thread mode does not close the connection from Python</span>
<span class="sd"> to JVM when the thread is finished in the Python side. With this class, Python</span>
<span class="sd"> garbage-collects the Python thread instance and also closes the connection</span>
<span class="sd"> which finishes JVM thread correctly.</span>
<span class="sd"> When the pinned thread mode is off, this works as :class:`threading.Thread`.</span>
<span class="sd"> .. versionadded:: 3.1.0</span>
<span class="sd"> Notes</span>
<span class="sd"> -----</span>
<span class="sd"> This API is experimental.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">_props</span><span class="p">:</span> <span class="n">JavaObject</span><!-- class-level annotation; the attribute itself is assigned in start() below -->
<span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">target</span><span class="p">:</span> <span class="n">Callable</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">:</span> <span class="n">Any</span><span class="p">):</span>
<span class="kn">from</span> <span class="nn">pyspark</span> <span class="kn">import</span> <span class="n">SparkContext</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">SparkContext</span><span class="o">.</span><span class="n">_gateway</span><span class="p">,</span> <span class="n">ClientServer</span><span class="p">):</span>
<span class="c1"># Here&#39;s when the pinned-thread mode (PYSPARK_PIN_THREAD) is on.</span>
<span class="k">def</span> <span class="nf">copy_local_properties</span><span class="p">(</span><span class="o">*</span><span class="n">a</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="o">**</span><span class="n">k</span><span class="p">:</span> <span class="n">Any</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Any</span><span class="p">:</span>
<span class="c1"># self._props is set before starting the thread to match the behavior with JVM.</span>
<span class="k">assert</span> <span class="nb">hasattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="s2">&quot;_props&quot;</span><span class="p">)</span>
<span class="k">assert</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span>
<span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span><span class="o">.</span><span class="n">_jsc</span><span class="o">.</span><span class="n">sc</span><span class="p">()</span><span class="o">.</span><span class="n">setLocalProperties</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_props</span><span class="p">)</span>
<span class="k">return</span> <span class="n">target</span><span class="p">(</span><span class="o">*</span><span class="n">a</span><span class="p">,</span> <span class="o">**</span><span class="n">k</span><span class="p">)</span>
<span class="nb">super</span><span class="p">(</span><span class="n">InheritableThread</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">target</span><span class="o">=</span><span class="n">copy_local_properties</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span> <span class="c1"># type: ignore[misc]</span>
<span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="nb">super</span><span class="p">(</span><span class="n">InheritableThread</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">target</span><span class="o">=</span><span class="n">target</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span> <span class="c1"># type: ignore[misc]</span>
<span class="p">)</span>
<span class="k">def</span> <span class="nf">start</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="kc">None</span><span class="p">:</span><!-- start() snapshots JVM local properties into self._props before delegating to Thread.start() -->
<span class="kn">from</span> <span class="nn">pyspark</span> <span class="kn">import</span> <span class="n">SparkContext</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">SparkContext</span><span class="o">.</span><span class="n">_gateway</span><span class="p">,</span> <span class="n">ClientServer</span><span class="p">):</span>
<span class="c1"># Here&#39;s when the pinned-thread mode (PYSPARK_PIN_THREAD) is on.</span>
<span class="c1"># Local property copy should happen in Thread.start to mimic JVM&#39;s behavior.</span>
<span class="k">assert</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_props</span> <span class="o">=</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span><span class="o">.</span><span class="n">_jsc</span><span class="o">.</span><span class="n">sc</span><span class="p">()</span><span class="o">.</span><span class="n">getLocalProperties</span><span class="p">()</span><span class="o">.</span><span class="n">clone</span><span class="p">()</span>
<span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">InheritableThread</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="n">start</span><span class="p">()</span></div>
<span class="k">if</span> <span class="vm">__name__</span> <span class="o">==</span> <span class="s2">&quot;__main__&quot;</span><span class="p">:</span><!-- highlighted __main__ guard: runs the module doctests against a local[4] SparkContext (skipped on PyPy and Python < 3.7) -->
<span class="k">if</span> <span class="s2">&quot;pypy&quot;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">platform</span><span class="o">.</span><span class="n">python_implementation</span><span class="p">()</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span> <span class="ow">and</span> <span class="n">sys</span><span class="o">.</span><span class="n">version_info</span><span class="p">[:</span><span class="mi">2</span><span class="p">]</span> <span class="o">&gt;=</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">7</span><span class="p">):</span>
<span class="kn">import</span> <span class="nn">doctest</span>
<span class="kn">import</span> <span class="nn">pyspark.util</span>
<span class="kn">from</span> <span class="nn">pyspark.context</span> <span class="kn">import</span> <span class="n">SparkContext</span>
<span class="n">globs</span> <span class="o">=</span> <span class="n">pyspark</span><span class="o">.</span><span class="n">util</span><span class="o">.</span><span class="vm">__dict__</span><span class="o">.</span><span class="n">copy</span><span class="p">()</span>
<span class="n">globs</span><span class="p">[</span><span class="s2">&quot;sc&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">SparkContext</span><span class="p">(</span><span class="s2">&quot;local[4]&quot;</span><span class="p">,</span> <span class="s2">&quot;PythonTest&quot;</span><span class="p">)</span>
<span class="p">(</span><span class="n">failure_count</span><span class="p">,</span> <span class="n">test_count</span><span class="p">)</span> <span class="o">=</span> <span class="n">doctest</span><span class="o">.</span><span class="n">testmod</span><span class="p">(</span><span class="n">pyspark</span><span class="o">.</span><span class="n">util</span><span class="p">,</span> <span class="n">globs</span><span class="o">=</span><span class="n">globs</span><span class="p">)</span>
<span class="n">globs</span><span class="p">[</span><span class="s2">&quot;sc&quot;</span><span class="p">]</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
<span class="k">if</span> <span class="n">failure_count</span><span class="p">:</span>
<span class="n">sys</span><span class="o">.</span><span class="n">exit</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span>
</pre></div>
</div>
<!-- Previous / next buttons -->
<div class='prev-next-area'>
</div>
</main>
</div>
</div>
<script src="../../_static/scripts/pydata-sphinx-theme.js?digest=1999514e3f237ded88cf"></script>
<footer class="footer mt-5 mt-md-0">
<div class="container">
<!-- Copyright notice. The project author/year is unset in the Sphinx config, which
     previously rendered as "Copyright ." with a stray space before the period. -->
<div class="footer-item">
<p class="copyright">
&copy; Copyright.<br>
</p>
</div>
<!-- Generator credit. Link upgraded to https (canonical host www.sphinx-doc.org)
     to avoid an insecure, redirecting http URL. -->
<div class="footer-item">
<p class="sphinx-version">
Created using <a href="https://www.sphinx-doc.org/">Sphinx</a> 3.0.4.<br>
</p>
</div>
</div>
</footer>
</body>
</html>