Source code for pyspark.sql.window (PySpark 3.4.2 documentation)
Source: https://spark.apache.org/docs/latest/api/python/_modules/pyspark/sql/window.html

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from typing import cast, Iterable, List, Tuple, TYPE_CHECKING, Union

from py4j.java_gateway import JavaObject, JVMView

from pyspark.sql.column import _to_seq, _to_java_column
from pyspark.sql.utils import (
    try_remote_window,
    try_remote_windowspec,
    get_active_spark_context,
)

if TYPE_CHECKING:
    from pyspark.sql._typing import ColumnOrName, ColumnOrName_

__all__ = ["Window", "WindowSpec"]


| <span class="k">def</span> <span class="nf">_to_java_cols</span><span class="p">(</span><span class="n">cols</span><span class="p">:</span> <span class="n">Tuple</span><span class="p">[</span><span class="n">Union</span><span class="p">[</span><span class="s2">"ColumnOrName"</span><span class="p">,</span> <span class="n">List</span><span class="p">[</span><span class="s2">"ColumnOrName_"</span><span class="p">]],</span> <span class="o">...</span><span class="p">])</span> <span class="o">-></span> <span class="n">JavaObject</span><span class="p">:</span> |
| <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">cols</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span> <span class="ow">and</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">cols</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="nb">list</span><span class="p">):</span> |
| <span class="n">cols</span> <span class="o">=</span> <span class="n">cols</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="c1"># type: ignore[assignment]</span> |
| <span class="n">sc</span> <span class="o">=</span> <span class="n">get_active_spark_context</span><span class="p">()</span> |
| <span class="k">return</span> <span class="n">_to_seq</span><span class="p">(</span><span class="n">sc</span><span class="p">,</span> <span class="n">cast</span><span class="p">(</span><span class="n">Iterable</span><span class="p">[</span><span class="s2">"ColumnOrName"</span><span class="p">],</span> <span class="n">cols</span><span class="p">),</span> <span class="n">_to_java_column</span><span class="p">)</span> |
| |
| |
| <div class="viewcode-block" id="Window"><a class="viewcode-back" href="../../../reference/pyspark.sql/api/pyspark.sql.Window.html#pyspark.sql.Window">[docs]</a><span class="k">class</span> <span class="nc">Window</span><span class="p">:</span> |
| <span class="w"> </span><span class="sd">"""</span> |
| <span class="sd"> Utility functions for defining window in DataFrames.</span> |
| |
| <span class="sd"> .. versionadded:: 1.4.0</span> |
| |
| <span class="sd"> .. versionchanged:: 3.4.0</span> |
| <span class="sd"> Supports Spark Connect.</span> |
| |
| <span class="sd"> Notes</span> |
| <span class="sd"> -----</span> |
| <span class="sd"> When ordering is not defined, an unbounded window frame (rowFrame,</span> |
| <span class="sd"> unboundedPreceding, unboundedFollowing) is used by default. When ordering is defined,</span> |
| <span class="sd"> a growing window frame (rangeFrame, unboundedPreceding, currentRow) is used by default.</span> |
| |
| <span class="sd"> Examples</span> |
| <span class="sd"> --------</span> |
| <span class="sd"> >>> # ORDER BY date ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW</span> |
| <span class="sd"> >>> window = Window.orderBy("date").rowsBetween(Window.unboundedPreceding, Window.currentRow)</span> |
| |
| <span class="sd"> >>> # PARTITION BY country ORDER BY date RANGE BETWEEN 3 PRECEDING AND 3 FOLLOWING</span> |
| <span class="sd"> >>> window = Window.orderBy("date").partitionBy("country").rangeBetween(-3, 3)</span> |
| <span class="sd"> """</span> |
| |
| <span class="n">_JAVA_MIN_LONG</span> <span class="o">=</span> <span class="o">-</span><span class="p">(</span><span class="mi">1</span> <span class="o"><<</span> <span class="mi">63</span><span class="p">)</span> <span class="c1"># -9223372036854775808</span> |
| <span class="n">_JAVA_MAX_LONG</span> <span class="o">=</span> <span class="p">(</span><span class="mi">1</span> <span class="o"><<</span> <span class="mi">63</span><span class="p">)</span> <span class="o">-</span> <span class="mi">1</span> <span class="c1"># 9223372036854775807</span> |
| <span class="n">_PRECEDING_THRESHOLD</span> <span class="o">=</span> <span class="nb">max</span><span class="p">(</span><span class="o">-</span><span class="n">sys</span><span class="o">.</span><span class="n">maxsize</span><span class="p">,</span> <span class="n">_JAVA_MIN_LONG</span><span class="p">)</span> |
| <span class="n">_FOLLOWING_THRESHOLD</span> <span class="o">=</span> <span class="nb">min</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">maxsize</span><span class="p">,</span> <span class="n">_JAVA_MAX_LONG</span><span class="p">)</span> |
| |
| <span class="n">unboundedPreceding</span><span class="p">:</span> <span class="nb">int</span> <span class="o">=</span> <span class="n">_JAVA_MIN_LONG</span> |
| |
| <span class="n">unboundedFollowing</span><span class="p">:</span> <span class="nb">int</span> <span class="o">=</span> <span class="n">_JAVA_MAX_LONG</span> |
| |
| <span class="n">currentRow</span><span class="p">:</span> <span class="nb">int</span> <span class="o">=</span> <span class="mi">0</span> |
| |
| <div class="viewcode-block" id="Window.partitionBy"><a class="viewcode-back" href="../../../reference/pyspark.sql/api/pyspark.sql.Window.partitionBy.html#pyspark.sql.Window.partitionBy">[docs]</a> <span class="nd">@staticmethod</span> |
| <span class="nd">@try_remote_window</span> |
| <span class="k">def</span> <span class="nf">partitionBy</span><span class="p">(</span><span class="o">*</span><span class="n">cols</span><span class="p">:</span> <span class="n">Union</span><span class="p">[</span><span class="s2">"ColumnOrName"</span><span class="p">,</span> <span class="n">List</span><span class="p">[</span><span class="s2">"ColumnOrName_"</span><span class="p">]])</span> <span class="o">-></span> <span class="s2">"WindowSpec"</span><span class="p">:</span> |
| <span class="w"> </span><span class="sd">"""</span> |
| <span class="sd"> Creates a :class:`WindowSpec` with the partitioning defined.</span> |
| |
| <span class="sd"> .. versionadded:: 1.4.0</span> |
| |
| <span class="sd"> Parameters</span> |
| <span class="sd"> ----------</span> |
| <span class="sd"> cols : str, :class:`Column` or list</span> |
| <span class="sd"> names of columns or expressions</span> |
| |
| <span class="sd"> Returns</span> |
| <span class="sd"> -------</span> |
| <span class="sd"> :class: `WindowSpec`</span> |
| <span class="sd"> A :class:`WindowSpec` with the partitioning defined.</span> |
| |
| <span class="sd"> Examples</span> |
| <span class="sd"> --------</span> |
| <span class="sd"> >>> from pyspark.sql import Window</span> |
| <span class="sd"> >>> from pyspark.sql.functions import row_number</span> |
| <span class="sd"> >>> df = spark.createDataFrame(</span> |
| <span class="sd"> ... [(1, "a"), (1, "a"), (2, "a"), (1, "b"), (2, "b"), (3, "b")], ["id", "category"])</span> |
| <span class="sd"> >>> df.show()</span> |
| <span class="sd"> +---+--------+</span> |
| <span class="sd"> | id|category|</span> |
| <span class="sd"> +---+--------+</span> |
| <span class="sd"> | 1| a|</span> |
| <span class="sd"> | 1| a|</span> |
| <span class="sd"> | 2| a|</span> |
| <span class="sd"> | 1| b|</span> |
| <span class="sd"> | 2| b|</span> |
| <span class="sd"> | 3| b|</span> |
| <span class="sd"> +---+--------+</span> |
| |
| <span class="sd"> Show row number order by ``id`` in partition ``category``.</span> |
| |
| <span class="sd"> >>> window = Window.partitionBy("category").orderBy("id")</span> |
| <span class="sd"> >>> df.withColumn("row_number", row_number().over(window)).show()</span> |
| <span class="sd"> +---+--------+----------+</span> |
| <span class="sd"> | id|category|row_number|</span> |
| <span class="sd"> +---+--------+----------+</span> |
| <span class="sd"> | 1| a| 1|</span> |
| <span class="sd"> | 1| a| 2|</span> |
| <span class="sd"> | 2| a| 3|</span> |
| <span class="sd"> | 1| b| 1|</span> |
| <span class="sd"> | 2| b| 2|</span> |
| <span class="sd"> | 3| b| 3|</span> |
| <span class="sd"> +---+--------+----------+</span> |
| <span class="sd"> """</span> |
| <span class="n">sc</span> <span class="o">=</span> <span class="n">get_active_spark_context</span><span class="p">()</span> |
| <span class="n">jspec</span> <span class="o">=</span> <span class="n">cast</span><span class="p">(</span><span class="n">JVMView</span><span class="p">,</span> <span class="n">sc</span><span class="o">.</span><span class="n">_jvm</span><span class="p">)</span><span class="o">.</span><span class="n">org</span><span class="o">.</span><span class="n">apache</span><span class="o">.</span><span class="n">spark</span><span class="o">.</span><span class="n">sql</span><span class="o">.</span><span class="n">expressions</span><span class="o">.</span><span class="n">Window</span><span class="o">.</span><span class="n">partitionBy</span><span class="p">(</span> |
| <span class="n">_to_java_cols</span><span class="p">(</span><span class="n">cols</span><span class="p">)</span> |
| <span class="p">)</span> |
| <span class="k">return</span> <span class="n">WindowSpec</span><span class="p">(</span><span class="n">jspec</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="Window.orderBy"><a class="viewcode-back" href="../../../reference/pyspark.sql/api/pyspark.sql.Window.orderBy.html#pyspark.sql.Window.orderBy">[docs]</a> <span class="nd">@staticmethod</span> |
| <span class="nd">@try_remote_window</span> |
| <span class="k">def</span> <span class="nf">orderBy</span><span class="p">(</span><span class="o">*</span><span class="n">cols</span><span class="p">:</span> <span class="n">Union</span><span class="p">[</span><span class="s2">"ColumnOrName"</span><span class="p">,</span> <span class="n">List</span><span class="p">[</span><span class="s2">"ColumnOrName_"</span><span class="p">]])</span> <span class="o">-></span> <span class="s2">"WindowSpec"</span><span class="p">:</span> |
| <span class="w"> </span><span class="sd">"""</span> |
| <span class="sd"> Creates a :class:`WindowSpec` with the ordering defined.</span> |
| |
| <span class="sd"> .. versionadded:: 1.4.0</span> |
| |
| <span class="sd"> Parameters</span> |
| <span class="sd"> ----------</span> |
| <span class="sd"> cols : str, :class:`Column` or list</span> |
| <span class="sd"> names of columns or expressions</span> |
| |
| <span class="sd"> Returns</span> |
| <span class="sd"> -------</span> |
| <span class="sd"> :class: `WindowSpec`</span> |
| <span class="sd"> A :class:`WindowSpec` with the ordering defined.</span> |
| |
| <span class="sd"> Examples</span> |
| <span class="sd"> --------</span> |
| <span class="sd"> >>> from pyspark.sql import Window</span> |
| <span class="sd"> >>> from pyspark.sql.functions import row_number</span> |
| <span class="sd"> >>> df = spark.createDataFrame(</span> |
| <span class="sd"> ... [(1, "a"), (1, "a"), (2, "a"), (1, "b"), (2, "b"), (3, "b")], ["id", "category"])</span> |
| <span class="sd"> >>> df.show()</span> |
| <span class="sd"> +---+--------+</span> |
| <span class="sd"> | id|category|</span> |
| <span class="sd"> +---+--------+</span> |
| <span class="sd"> | 1| a|</span> |
| <span class="sd"> | 1| a|</span> |
| <span class="sd"> | 2| a|</span> |
| <span class="sd"> | 1| b|</span> |
| <span class="sd"> | 2| b|</span> |
| <span class="sd"> | 3| b|</span> |
| <span class="sd"> +---+--------+</span> |
| |
| <span class="sd"> Show row number order by ``category`` in partition ``id``.</span> |
| |
| <span class="sd"> >>> window = Window.partitionBy("id").orderBy("category")</span> |
| <span class="sd"> >>> df.withColumn("row_number", row_number().over(window)).show()</span> |
| <span class="sd"> +---+--------+----------+</span> |
| <span class="sd"> | id|category|row_number|</span> |
| <span class="sd"> +---+--------+----------+</span> |
| <span class="sd"> | 1| a| 1|</span> |
| <span class="sd"> | 1| a| 2|</span> |
| <span class="sd"> | 1| b| 3|</span> |
| <span class="sd"> | 2| a| 1|</span> |
| <span class="sd"> | 2| b| 2|</span> |
| <span class="sd"> | 3| b| 1|</span> |
| <span class="sd"> +---+--------+----------+</span> |
| <span class="sd"> """</span> |
| <span class="n">sc</span> <span class="o">=</span> <span class="n">get_active_spark_context</span><span class="p">()</span> |
| <span class="n">jspec</span> <span class="o">=</span> <span class="n">cast</span><span class="p">(</span><span class="n">JVMView</span><span class="p">,</span> <span class="n">sc</span><span class="o">.</span><span class="n">_jvm</span><span class="p">)</span><span class="o">.</span><span class="n">org</span><span class="o">.</span><span class="n">apache</span><span class="o">.</span><span class="n">spark</span><span class="o">.</span><span class="n">sql</span><span class="o">.</span><span class="n">expressions</span><span class="o">.</span><span class="n">Window</span><span class="o">.</span><span class="n">orderBy</span><span class="p">(</span> |
| <span class="n">_to_java_cols</span><span class="p">(</span><span class="n">cols</span><span class="p">)</span> |
| <span class="p">)</span> |
| <span class="k">return</span> <span class="n">WindowSpec</span><span class="p">(</span><span class="n">jspec</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="Window.rowsBetween"><a class="viewcode-back" href="../../../reference/pyspark.sql/api/pyspark.sql.Window.rowsBetween.html#pyspark.sql.Window.rowsBetween">[docs]</a> <span class="nd">@staticmethod</span> |
| <span class="nd">@try_remote_window</span> |
| <span class="k">def</span> <span class="nf">rowsBetween</span><span class="p">(</span><span class="n">start</span><span class="p">:</span> <span class="nb">int</span><span class="p">,</span> <span class="n">end</span><span class="p">:</span> <span class="nb">int</span><span class="p">)</span> <span class="o">-></span> <span class="s2">"WindowSpec"</span><span class="p">:</span> |
| <span class="w"> </span><span class="sd">"""</span> |
| <span class="sd"> Creates a :class:`WindowSpec` with the frame boundaries defined,</span> |
| <span class="sd"> from `start` (inclusive) to `end` (inclusive).</span> |
| |
| <span class="sd"> Both `start` and `end` are relative positions from the current row.</span> |
| <span class="sd"> For example, "0" means "current row", while "-1" means the row before</span> |
| <span class="sd"> the current row, and "5" means the fifth row after the current row.</span> |
| |
| <span class="sd"> We recommend users use ``Window.unboundedPreceding``, ``Window.unboundedFollowing``,</span> |
| <span class="sd"> and ``Window.currentRow`` to specify special boundary values, rather than using integral</span> |
| <span class="sd"> values directly.</span> |
| |
| <span class="sd"> A row based boundary is based on the position of the row within the partition.</span> |
| <span class="sd"> An offset indicates the number of rows above or below the current row, the frame for the</span> |
| <span class="sd"> current row starts or ends. For instance, given a row based sliding frame with a lower bound</span> |
| <span class="sd"> offset of -1 and a upper bound offset of +2. The frame for row with index 5 would range from</span> |
| <span class="sd"> index 4 to index 7.</span> |
| |
| <span class="sd"> .. versionadded:: 2.1.0</span> |
| |
| <span class="sd"> Parameters</span> |
| <span class="sd"> ----------</span> |
| <span class="sd"> start : int</span> |
| <span class="sd"> boundary start, inclusive.</span> |
| <span class="sd"> The frame is unbounded if this is ``Window.unboundedPreceding``, or</span> |
| <span class="sd"> any value less than or equal to -9223372036854775808.</span> |
| <span class="sd"> end : int</span> |
| <span class="sd"> boundary end, inclusive.</span> |
| <span class="sd"> The frame is unbounded if this is ``Window.unboundedFollowing``, or</span> |
| <span class="sd"> any value greater than or equal to 9223372036854775807.</span> |
| |
| <span class="sd"> Returns</span> |
| <span class="sd"> -------</span> |
| <span class="sd"> :class: `WindowSpec`</span> |
| <span class="sd"> A :class:`WindowSpec` with the frame boundaries defined,</span> |
| <span class="sd"> from `start` (inclusive) to `end` (inclusive).</span> |
| |
| <span class="sd"> Examples</span> |
| <span class="sd"> --------</span> |
| <span class="sd"> >>> from pyspark.sql import Window</span> |
| <span class="sd"> >>> from pyspark.sql import functions as func</span> |
| <span class="sd"> >>> df = spark.createDataFrame(</span> |
| <span class="sd"> ... [(1, "a"), (1, "a"), (2, "a"), (1, "b"), (2, "b"), (3, "b")], ["id", "category"])</span> |
| <span class="sd"> >>> df.show()</span> |
| <span class="sd"> +---+--------+</span> |
| <span class="sd"> | id|category|</span> |
| <span class="sd"> +---+--------+</span> |
| <span class="sd"> | 1| a|</span> |
| <span class="sd"> | 1| a|</span> |
| <span class="sd"> | 2| a|</span> |
| <span class="sd"> | 1| b|</span> |
| <span class="sd"> | 2| b|</span> |
| <span class="sd"> | 3| b|</span> |
| <span class="sd"> +---+--------+</span> |
| |
| <span class="sd"> Calculate sum of ``id`` in the range from currentRow to currentRow + 1</span> |
| <span class="sd"> in partition ``category``</span> |
| |
| <span class="sd"> >>> window = Window.partitionBy("category").orderBy("id").rowsBetween(Window.currentRow, 1)</span> |
| <span class="sd"> >>> df.withColumn("sum", func.sum("id").over(window)).sort("id", "category", "sum").show()</span> |
| <span class="sd"> +---+--------+---+</span> |
| <span class="sd"> | id|category|sum|</span> |
| <span class="sd"> +---+--------+---+</span> |
| <span class="sd"> | 1| a| 2|</span> |
| <span class="sd"> | 1| a| 3|</span> |
| <span class="sd"> | 1| b| 3|</span> |
| <span class="sd"> | 2| a| 2|</span> |
| <span class="sd"> | 2| b| 5|</span> |
| <span class="sd"> | 3| b| 3|</span> |
| <span class="sd"> +---+--------+---+</span> |
| |
| <span class="sd"> """</span> |
| <span class="k">if</span> <span class="n">start</span> <span class="o"><=</span> <span class="n">Window</span><span class="o">.</span><span class="n">_PRECEDING_THRESHOLD</span><span class="p">:</span> |
| <span class="n">start</span> <span class="o">=</span> <span class="n">Window</span><span class="o">.</span><span class="n">unboundedPreceding</span> |
| <span class="k">if</span> <span class="n">end</span> <span class="o">>=</span> <span class="n">Window</span><span class="o">.</span><span class="n">_FOLLOWING_THRESHOLD</span><span class="p">:</span> |
| <span class="n">end</span> <span class="o">=</span> <span class="n">Window</span><span class="o">.</span><span class="n">unboundedFollowing</span> |
| <span class="n">sc</span> <span class="o">=</span> <span class="n">get_active_spark_context</span><span class="p">()</span> |
| <span class="n">jspec</span> <span class="o">=</span> <span class="n">cast</span><span class="p">(</span><span class="n">JVMView</span><span class="p">,</span> <span class="n">sc</span><span class="o">.</span><span class="n">_jvm</span><span class="p">)</span><span class="o">.</span><span class="n">org</span><span class="o">.</span><span class="n">apache</span><span class="o">.</span><span class="n">spark</span><span class="o">.</span><span class="n">sql</span><span class="o">.</span><span class="n">expressions</span><span class="o">.</span><span class="n">Window</span><span class="o">.</span><span class="n">rowsBetween</span><span class="p">(</span> |
| <span class="n">start</span><span class="p">,</span> <span class="n">end</span> |
| <span class="p">)</span> |
| <span class="k">return</span> <span class="n">WindowSpec</span><span class="p">(</span><span class="n">jspec</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="Window.rangeBetween"><a class="viewcode-back" href="../../../reference/pyspark.sql/api/pyspark.sql.Window.rangeBetween.html#pyspark.sql.Window.rangeBetween">[docs]</a> <span class="nd">@staticmethod</span> |
| <span class="nd">@try_remote_window</span> |
| <span class="k">def</span> <span class="nf">rangeBetween</span><span class="p">(</span><span class="n">start</span><span class="p">:</span> <span class="nb">int</span><span class="p">,</span> <span class="n">end</span><span class="p">:</span> <span class="nb">int</span><span class="p">)</span> <span class="o">-></span> <span class="s2">"WindowSpec"</span><span class="p">:</span> |
| <span class="w"> </span><span class="sd">"""</span> |
| <span class="sd"> Creates a :class:`WindowSpec` with the frame boundaries defined,</span> |
| <span class="sd"> from `start` (inclusive) to `end` (inclusive).</span> |
| |
| <span class="sd"> Both `start` and `end` are relative from the current row. For example,</span> |
| <span class="sd"> "0" means "current row", while "-1" means one off before the current row,</span> |
| <span class="sd"> and "5" means the five off after the current row.</span> |
| |
| <span class="sd"> We recommend users use ``Window.unboundedPreceding``, ``Window.unboundedFollowing``,</span> |
| <span class="sd"> and ``Window.currentRow`` to specify special boundary values, rather than using integral</span> |
| <span class="sd"> values directly.</span> |
| |
| <span class="sd"> A range-based boundary is based on the actual value of the ORDER BY</span> |
| <span class="sd"> expression(s). An offset is used to alter the value of the ORDER BY expression, for</span> |
| <span class="sd"> instance if the current ORDER BY expression has a value of 10 and the lower bound offset</span> |
| <span class="sd"> is -3, the resulting lower bound for the current row will be 10 - 3 = 7. This however puts a</span> |
| <span class="sd"> number of constraints on the ORDER BY expressions: there can be only one expression and this</span> |
| <span class="sd"> expression must have a numerical data type. An exception can be made when the offset is</span> |
| <span class="sd"> unbounded, because no value modification is needed, in this case multiple and non-numeric</span> |
| <span class="sd"> ORDER BY expression are allowed.</span> |
| |
| <span class="sd"> .. versionadded:: 2.1.0</span> |
| |
| <span class="sd"> Parameters</span> |
| <span class="sd"> ----------</span> |
| <span class="sd"> start : int</span> |
| <span class="sd"> boundary start, inclusive.</span> |
| <span class="sd"> The frame is unbounded if this is ``Window.unboundedPreceding``, or</span> |
| <span class="sd"> any value less than or equal to max(-sys.maxsize, -9223372036854775808).</span> |
| <span class="sd"> end : int</span> |
| <span class="sd"> boundary end, inclusive.</span> |
| <span class="sd"> The frame is unbounded if this is ``Window.unboundedFollowing``, or</span> |
| <span class="sd"> any value greater than or equal to min(sys.maxsize, 9223372036854775807).</span> |
| |
| <span class="sd"> Returns</span> |
| <span class="sd"> -------</span> |
| <span class="sd"> :class: `WindowSpec`</span> |
| <span class="sd"> A :class:`WindowSpec` with the frame boundaries defined,</span> |
| <span class="sd"> from `start` (inclusive) to `end` (inclusive).</span> |
| |
| <span class="sd"> Examples</span> |
| <span class="sd"> --------</span> |
| <span class="sd"> >>> from pyspark.sql import Window</span> |
| <span class="sd"> >>> from pyspark.sql import functions as func</span> |
| <span class="sd"> >>> df = spark.createDataFrame(</span> |
| <span class="sd"> ... [(1, "a"), (1, "a"), (2, "a"), (1, "b"), (2, "b"), (3, "b")], ["id", "category"])</span> |
| <span class="sd"> >>> df.show()</span> |
| <span class="sd"> +---+--------+</span> |
| <span class="sd"> | id|category|</span> |
| <span class="sd"> +---+--------+</span> |
| <span class="sd"> | 1| a|</span> |
| <span class="sd"> | 1| a|</span> |
| <span class="sd"> | 2| a|</span> |
| <span class="sd"> | 1| b|</span> |
| <span class="sd"> | 2| b|</span> |
| <span class="sd"> | 3| b|</span> |
| <span class="sd"> +---+--------+</span> |
| |
| <span class="sd"> Calculate sum of ``id`` in the range from ``id`` of currentRow to ``id`` of currentRow + 1</span> |
| <span class="sd"> in partition ``category``</span> |
| |
| <span class="sd"> >>> window = Window.partitionBy("category").orderBy("id").rangeBetween(Window.currentRow, 1)</span> |
| <span class="sd"> >>> df.withColumn("sum", func.sum("id").over(window)).sort("id", "category").show()</span> |
| <span class="sd"> +---+--------+---+</span> |
| <span class="sd"> | id|category|sum|</span> |
| <span class="sd"> +---+--------+---+</span> |
| <span class="sd"> | 1| a| 4|</span> |
| <span class="sd"> | 1| a| 4|</span> |
| <span class="sd"> | 1| b| 3|</span> |
| <span class="sd"> | 2| a| 2|</span> |
| <span class="sd"> | 2| b| 5|</span> |
| <span class="sd"> | 3| b| 3|</span> |
| <span class="sd"> +---+--------+---+</span> |
| |
| <span class="sd"> """</span> |
| <span class="k">if</span> <span class="n">start</span> <span class="o"><=</span> <span class="n">Window</span><span class="o">.</span><span class="n">_PRECEDING_THRESHOLD</span><span class="p">:</span> |
| <span class="n">start</span> <span class="o">=</span> <span class="n">Window</span><span class="o">.</span><span class="n">unboundedPreceding</span> |
| <span class="k">if</span> <span class="n">end</span> <span class="o">>=</span> <span class="n">Window</span><span class="o">.</span><span class="n">_FOLLOWING_THRESHOLD</span><span class="p">:</span> |
| <span class="n">end</span> <span class="o">=</span> <span class="n">Window</span><span class="o">.</span><span class="n">unboundedFollowing</span> |
| <span class="n">sc</span> <span class="o">=</span> <span class="n">get_active_spark_context</span><span class="p">()</span> |
| <span class="n">jspec</span> <span class="o">=</span> <span class="n">cast</span><span class="p">(</span><span class="n">JVMView</span><span class="p">,</span> <span class="n">sc</span><span class="o">.</span><span class="n">_jvm</span><span class="p">)</span><span class="o">.</span><span class="n">org</span><span class="o">.</span><span class="n">apache</span><span class="o">.</span><span class="n">spark</span><span class="o">.</span><span class="n">sql</span><span class="o">.</span><span class="n">expressions</span><span class="o">.</span><span class="n">Window</span><span class="o">.</span><span class="n">rangeBetween</span><span class="p">(</span> |
| <span class="n">start</span><span class="p">,</span> <span class="n">end</span> |
| <span class="p">)</span> |
| <span class="k">return</span> <span class="n">WindowSpec</span><span class="p">(</span><span class="n">jspec</span><span class="p">)</span></div></div> |
| |

class WindowSpec:
    """
    A window specification that defines the partitioning, ordering,
    and frame boundaries.

    Use the static methods in :class:`Window` to create a :class:`WindowSpec`.

    .. versionadded:: 1.4.0

    .. versionchanged:: 3.4.0
        Supports Spark Connect.
    """

    def __init__(self, jspec: JavaObject) -> None:
        self._jspec = jspec

    @try_remote_windowspec
    def partitionBy(self, *cols: Union["ColumnOrName", List["ColumnOrName_"]]) -> "WindowSpec":
        """
        Defines the partitioning columns in a :class:`WindowSpec`.

        .. versionadded:: 1.4.0

        Parameters
        ----------
        cols : str, :class:`Column` or list
            names of columns or expressions
        """
        return WindowSpec(self._jspec.partitionBy(_to_java_cols(cols)))

    @try_remote_windowspec
    def orderBy(self, *cols: Union["ColumnOrName", List["ColumnOrName_"]]) -> "WindowSpec":
        """
        Defines the ordering columns in a :class:`WindowSpec`.

        .. versionadded:: 1.4.0

        Parameters
        ----------
        cols : str, :class:`Column` or list
            names of columns or expressions
        """
        return WindowSpec(self._jspec.orderBy(_to_java_cols(cols)))

| <div class="viewcode-block" id="WindowSpec.rowsBetween"><a class="viewcode-back" href="../../../reference/pyspark.sql/api/pyspark.sql.WindowSpec.rowsBetween.html#pyspark.sql.WindowSpec.rowsBetween">[docs]</a> <span class="nd">@try_remote_windowspec</span> |
| <span class="k">def</span> <span class="nf">rowsBetween</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">start</span><span class="p">:</span> <span class="nb">int</span><span class="p">,</span> <span class="n">end</span><span class="p">:</span> <span class="nb">int</span><span class="p">)</span> <span class="o">-></span> <span class="s2">"WindowSpec"</span><span class="p">:</span> |
| <span class="w"> </span><span class="sd">"""</span> |
| <span class="sd"> Defines the frame boundaries, from `start` (inclusive) to `end` (inclusive).</span> |
| |
| <span class="sd"> Both `start` and `end` are relative positions from the current row.</span> |
| <span class="sd"> For example, "0" means "current row", while "-1" means the row before</span> |
| <span class="sd"> the current row, and "5" means the fifth row after the current row.</span> |
| |
| <span class="sd"> We recommend users use ``Window.unboundedPreceding``, ``Window.unboundedFollowing``,</span> |
| <span class="sd"> and ``Window.currentRow`` to specify special boundary values, rather than using integral</span> |
| <span class="sd"> values directly.</span> |
| |
| <span class="sd"> .. versionadded:: 1.4.0</span> |
| |
| <span class="sd"> Parameters</span> |
| <span class="sd"> ----------</span> |
| <span class="sd"> start : int</span> |
| <span class="sd"> boundary start, inclusive.</span> |
| <span class="sd"> The frame is unbounded if this is ``Window.unboundedPreceding``, or</span> |
| <span class="sd"> any value less than or equal to max(-sys.maxsize, -9223372036854775808).</span> |
| <span class="sd"> end : int</span> |
| <span class="sd"> boundary end, inclusive.</span> |
| <span class="sd"> The frame is unbounded if this is ``Window.unboundedFollowing``, or</span> |
| <span class="sd"> any value greater than or equal to min(sys.maxsize, 9223372036854775807).</span> |
| <span class="sd"> """</span> |
| <span class="k">if</span> <span class="n">start</span> <span class="o"><=</span> <span class="n">Window</span><span class="o">.</span><span class="n">_PRECEDING_THRESHOLD</span><span class="p">:</span> |
| <span class="n">start</span> <span class="o">=</span> <span class="n">Window</span><span class="o">.</span><span class="n">unboundedPreceding</span> |
| <span class="k">if</span> <span class="n">end</span> <span class="o">>=</span> <span class="n">Window</span><span class="o">.</span><span class="n">_FOLLOWING_THRESHOLD</span><span class="p">:</span> |
| <span class="n">end</span> <span class="o">=</span> <span class="n">Window</span><span class="o">.</span><span class="n">unboundedFollowing</span> |
| <span class="k">return</span> <span class="n">WindowSpec</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_jspec</span><span class="o">.</span><span class="n">rowsBetween</span><span class="p">(</span><span class="n">start</span><span class="p">,</span> <span class="n">end</span><span class="p">))</span></div> |
| |
| <div class="viewcode-block" id="WindowSpec.rangeBetween"><a class="viewcode-back" href="../../../reference/pyspark.sql/api/pyspark.sql.WindowSpec.rangeBetween.html#pyspark.sql.WindowSpec.rangeBetween">[docs]</a> <span class="nd">@try_remote_windowspec</span> |
| <span class="k">def</span> <span class="nf">rangeBetween</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">start</span><span class="p">:</span> <span class="nb">int</span><span class="p">,</span> <span class="n">end</span><span class="p">:</span> <span class="nb">int</span><span class="p">)</span> <span class="o">-></span> <span class="s2">"WindowSpec"</span><span class="p">:</span> |
| <span class="w"> </span><span class="sd">"""</span> |
| <span class="sd"> Defines the frame boundaries, from `start` (inclusive) to `end` (inclusive).</span> |
| |
| <span class="sd"> Both `start` and `end` are relative from the current row. For example,</span> |
| <span class="sd"> "0" means "current row", while "-1" means one off before the current row,</span> |
| <span class="sd"> and "5" means the five off after the current row.</span> |
| |
| <span class="sd"> We recommend users use ``Window.unboundedPreceding``, ``Window.unboundedFollowing``,</span> |
| <span class="sd"> and ``Window.currentRow`` to specify special boundary values, rather than using integral</span> |
| <span class="sd"> values directly.</span> |
| |
| <span class="sd"> .. versionadded:: 1.4.0</span> |
| |
| <span class="sd"> Parameters</span> |
| <span class="sd"> ----------</span> |
| <span class="sd"> start : int</span> |
| <span class="sd"> boundary start, inclusive.</span> |
| <span class="sd"> The frame is unbounded if this is ``Window.unboundedPreceding``, or</span> |
| <span class="sd"> any value less than or equal to max(-sys.maxsize, -9223372036854775808).</span> |
| <span class="sd"> end : int</span> |
| <span class="sd"> boundary end, inclusive.</span> |
| <span class="sd"> The frame is unbounded if this is ``Window.unboundedFollowing``, or</span> |
| <span class="sd"> any value greater than or equal to min(sys.maxsize, 9223372036854775807).</span> |
| <span class="sd"> """</span> |
| <span class="k">if</span> <span class="n">start</span> <span class="o"><=</span> <span class="n">Window</span><span class="o">.</span><span class="n">_PRECEDING_THRESHOLD</span><span class="p">:</span> |
| <span class="n">start</span> <span class="o">=</span> <span class="n">Window</span><span class="o">.</span><span class="n">unboundedPreceding</span> |
| <span class="k">if</span> <span class="n">end</span> <span class="o">>=</span> <span class="n">Window</span><span class="o">.</span><span class="n">_FOLLOWING_THRESHOLD</span><span class="p">:</span> |
| <span class="n">end</span> <span class="o">=</span> <span class="n">Window</span><span class="o">.</span><span class="n">unboundedFollowing</span> |
| <span class="k">return</span> <span class="n">WindowSpec</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_jspec</span><span class="o">.</span><span class="n">rangeBetween</span><span class="p">(</span><span class="n">start</span><span class="p">,</span> <span class="n">end</span><span class="p">))</span></div> |
| |
| |
| <span class="k">def</span> <span class="nf">_test</span><span class="p">()</span> <span class="o">-></span> <span class="kc">None</span><span class="p">:</span> |
| <span class="kn">import</span> <span class="nn">doctest</span> |
| <span class="kn">from</span> <span class="nn">pyspark.sql</span> <span class="kn">import</span> <span class="n">SparkSession</span> |
| <span class="kn">import</span> <span class="nn">pyspark.sql.window</span> |
| |
| <span class="n">globs</span> <span class="o">=</span> <span class="n">pyspark</span><span class="o">.</span><span class="n">sql</span><span class="o">.</span><span class="n">window</span><span class="o">.</span><span class="vm">__dict__</span><span class="o">.</span><span class="n">copy</span><span class="p">()</span> |
| <span class="n">spark</span> <span class="o">=</span> <span class="n">SparkSession</span><span class="o">.</span><span class="n">builder</span><span class="o">.</span><span class="n">master</span><span class="p">(</span><span class="s2">"local[4]"</span><span class="p">)</span><span class="o">.</span><span class="n">appName</span><span class="p">(</span><span class="s2">"sql.window tests"</span><span class="p">)</span><span class="o">.</span><span class="n">getOrCreate</span><span class="p">()</span> |
| <span class="n">globs</span><span class="p">[</span><span class="s2">"spark"</span><span class="p">]</span> <span class="o">=</span> <span class="n">spark</span> |
| |
| <span class="p">(</span><span class="n">failure_count</span><span class="p">,</span> <span class="n">test_count</span><span class="p">)</span> <span class="o">=</span> <span class="n">doctest</span><span class="o">.</span><span class="n">testmod</span><span class="p">(</span> |
| <span class="n">pyspark</span><span class="o">.</span><span class="n">sql</span><span class="o">.</span><span class="n">window</span><span class="p">,</span> <span class="n">globs</span><span class="o">=</span><span class="n">globs</span><span class="p">,</span> <span class="n">optionflags</span><span class="o">=</span><span class="n">doctest</span><span class="o">.</span><span class="n">NORMALIZE_WHITESPACE</span> |
| <span class="p">)</span> |
| <span class="n">spark</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span> |
| <span class="k">if</span> <span class="n">failure_count</span><span class="p">:</span> |
| <span class="n">sys</span><span class="o">.</span><span class="n">exit</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span> |
| |
| |
| <span class="k">if</span> <span class="vm">__name__</span> <span class="o">==</span> <span class="s2">"__main__"</span><span class="p">:</span> |
| <span class="n">_test</span><span class="p">()</span> |
| </pre></div> |
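

Illustrative usage
------------------

The snippets below are not part of pyspark/sql/window.py; they are minimal sketches that
assume an active ``spark`` session, ``from pyspark.sql import functions as F``, and the
example DataFrame ``df`` with columns ``id`` and ``category`` used in the docstrings above.

The Notes on the ``Window`` class describe the default frames. Without an ordering, the
default frame is unbounded, so an aggregate sees the whole partition; with an ordering,
the default frame grows up to the current row (including peers with equal ORDER BY values):

>>> totals = F.sum("id").over(Window.partitionBy("category"))
>>> running = F.sum("id").over(Window.partitionBy("category").orderBy("id"))
>>> df.select("id", "category", totals.alias("total"), running.alias("running"))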
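
``partitionBy`` and ``orderBy`` accept column names or :class:`Column` expressions, either
as separate arguments or as a single list (``_to_java_cols`` unwraps a lone list argument).
A sketch of the equivalent call styles:

>>> w1 = Window.partitionBy("category", "id")
>>> w2 = Window.partitionBy(["category", "id"])   # same partitioning as w1
>>> w3 = Window.partitionBy("category").orderBy(F.col("id").desc())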
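
The boundary sentinels are plain integers, and ``rowsBetween``/``rangeBetween`` normalise
any value at or beyond the thresholds to the unbounded sentinels, so the two specs below
describe the same frame:

>>> Window.unboundedPreceding, Window.currentRow, Window.unboundedFollowing
(-9223372036854775808, 0, 9223372036854775807)
>>> a = Window.orderBy("id").rowsBetween(Window.unboundedPreceding, Window.currentRow)
>>> b = Window.orderBy("id").rowsBetween(-(1 << 63), 0)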
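
Every :class:`WindowSpec` method returns a new spec, so a base specification can be reused
and refined without modifying the original, for example:

>>> base = Window.partitionBy("category")                  # partitioning only
>>> by_id = base.orderBy("id")                             # new spec; base is unchanged
>>> last_two = by_id.rowsBetween(-1, Window.currentRow)    # previous row and current row
>>> df.withColumn("sum_last_two", F.sum("id").over(last_two))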