Source code for pyspark.sql.utils (PySpark 3.1.1)

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import py4j

from pyspark import SparkContext


class CapturedException(Exception):
    def __init__(self, desc, stackTrace, cause=None):
        self.desc = desc
        self.stackTrace = stackTrace
        self.cause = convert_exception(cause) if cause is not None else None

    def __str__(self):
        sql_conf = SparkContext._jvm.org.apache.spark.sql.internal.SQLConf.get()
        debug_enabled = sql_conf.pysparkJVMStacktraceEnabled()
        desc = self.desc
        if debug_enabled:
            desc = desc + "\n\nJVM stacktrace:\n%s" % self.stackTrace
        return str(desc)


class AnalysisException(CapturedException):
    """
    Failed to analyze a SQL query plan.
    """


class ParseException(CapturedException):
    """
    Failed to parse a SQL command.
    """


class IllegalArgumentException(CapturedException):
    """
    Passed an illegal or inappropriate argument.
    """


class StreamingQueryException(CapturedException):
    """
    Exception that stopped a :class:`StreamingQuery`.
    """


class QueryExecutionException(CapturedException):
    """
    Failed to execute a query.
    """


class PythonException(CapturedException):
    """
    Exceptions thrown from Python workers.
    """


class UnknownException(CapturedException):
    """
    None of the above exceptions.
    """


def convert_exception(e):
    s = e.toString()
    c = e.getCause()
    stacktrace = SparkContext._jvm.org.apache.spark.util.Utils.exceptionString(e)

    if s.startswith('org.apache.spark.sql.AnalysisException: '):
        return AnalysisException(s.split(': ', 1)[1], stacktrace, c)
    if s.startswith('org.apache.spark.sql.catalyst.analysis'):
        return AnalysisException(s.split(': ', 1)[1], stacktrace, c)
    if s.startswith('org.apache.spark.sql.catalyst.parser.ParseException: '):
        return ParseException(s.split(': ', 1)[1], stacktrace, c)
    if s.startswith('org.apache.spark.sql.streaming.StreamingQueryException: '):
        return StreamingQueryException(s.split(': ', 1)[1], stacktrace, c)
    if s.startswith('org.apache.spark.sql.execution.QueryExecutionException: '):
        return QueryExecutionException(s.split(': ', 1)[1], stacktrace, c)
    if s.startswith('java.lang.IllegalArgumentException: '):
        return IllegalArgumentException(s.split(': ', 1)[1], stacktrace, c)
    if c is not None and (
            c.toString().startswith('org.apache.spark.api.python.PythonException: ')
            # To make sure this only catches Python UDFs.
            and any(map(lambda v: "org.apache.spark.sql.execution.python" in v.toString(),
                        c.getStackTrace()))):
        msg = ("\n  An exception was thrown from the Python worker. "
               "Please see the stack trace below.\n%s" % c.getMessage())
        return PythonException(msg, stacktrace)
    return UnknownException(s, stacktrace, c)


def capture_sql_exception(f):
    def deco(*a, **kw):
        try:
            return f(*a, **kw)
        except py4j.protocol.Py4JJavaError as e:
            converted = convert_exception(e.java_exception)
            if not isinstance(converted, UnknownException):
                # Hide where the exception came from that shows a non-Pythonic
                # JVM exception message.
                raise converted from None
            else:
                raise
    return deco


def install_exception_handler():
    """
    Hook an exception handler into Py4j, which can capture some SQL exceptions in Java.

    When calling the Java API, Py4J calls `get_return_value` to parse the returned object.
    If an exception occurred in the JVM, the result is a Java exception object and
    py4j.protocol.Py4JJavaError is raised. We replace the original `get_return_value`
    with one that captures the Java exception and raises a Python one (with the same
    error message).

    It is idempotent and can be called multiple times.
    """
    original = py4j.protocol.get_return_value
    # The original `get_return_value` is never itself patched, which keeps this idempotent.
    patched = capture_sql_exception(original)
    # Only patch the copy used in py4j.java_gateway (the Java API call path).
    py4j.java_gateway.get_return_value = patched


def toJArray(gateway, jtype, arr):
    """
    Convert a Python list into a Java typed array.

    Parameters
    ----------
    gateway :
        Py4J gateway
    jtype :
        Java type of the elements in the array
    arr :
        Python list to convert
    """
    jarray = gateway.new_array(jtype, len(arr))
    for i in range(0, len(arr)):
        jarray[i] = arr[i]
    return jarray


def require_test_compiled():
    """Raise an exception if the Spark SQL test classes are not compiled."""
    import os
    import glob
    try:
        spark_home = os.environ['SPARK_HOME']
    except KeyError:
        raise RuntimeError('SPARK_HOME is not defined in environment')

    test_class_path = os.path.join(
        spark_home, 'sql', 'core', 'target', '*', 'test-classes')
    paths = glob.glob(test_class_path)

    if len(paths) == 0:
        raise RuntimeError(
            "%s doesn't exist. Spark sql test classes are not compiled." % test_class_path)


class ForeachBatchFunction(object):
    """
    This is the Python implementation of the Java interface 'ForeachBatchFunction'.
    It wraps the user-defined 'foreachBatch' function so that it can be called from
    the JVM when the query is active.
    """

    def __init__(self, sql_ctx, func):
        self.sql_ctx = sql_ctx
        self.func = func

    def call(self, jdf, batch_id):
        from pyspark.sql.dataframe import DataFrame
        try:
            self.func(DataFrame(jdf, self.sql_ctx), batch_id)
        except Exception as e:
            self.error = e
            raise e

    class Java:
        implements = ['org.apache.spark.sql.execution.streaming.sources.PythonForeachBatchFunction']


def to_str(value):
    """
    A wrapper over str(), but converts bool values to lower case strings.
    If None is given, just returns None, instead of converting it to string "None".
    """
    if isinstance(value, bool):
        return str(value).lower()
    elif value is None:
        return value
    else:
        return str(value)