<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8" />
    <title>Structured Streaming — PySpark 3.2.3 documentation</title>
    <meta name="viewport" content="width=device-width, initial-scale=1" />
  </head>
  <body>
    <main role="main">
<div class="section" id="structured-streaming">
<h1>Structured Streaming<a class="headerlink" href="#structured-streaming" title="Permalink to this headline">¶</a></h1>
<div class="section" id="core-classes">
<h2>Core Classes<a class="headerlink" href="#core-classes" title="Permalink to this headline">¶</a></h2>
<table class="longtable table autosummary">
<colgroup>
<col style="width: 10%" />
<col style="width: 90%" />
</colgroup>
<tbody>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.html#pyspark.sql.streaming.DataStreamReader" title="pyspark.sql.streaming.DataStreamReader"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader</span></code></a>(spark)</p></td>
<td><p>Interface used to load a streaming <a class="reference internal" href="api/pyspark.sql.DataFrame.html#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code></a> from external storage systems (e.g. file systems, key-value stores).</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.html#pyspark.sql.streaming.DataStreamWriter" title="pyspark.sql.streaming.DataStreamWriter"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter</span></code></a>(df)</p></td>
<td><p>Interface used to write a streaming <a class="reference internal" href="api/pyspark.sql.DataFrame.html#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code></a> to external storage systems (e.g. file systems, key-value stores).</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.ForeachBatchFunction.html#pyspark.sql.streaming.ForeachBatchFunction" title="pyspark.sql.streaming.ForeachBatchFunction"><code class="xref py py-obj docutils literal notranslate"><span class="pre">ForeachBatchFunction</span></code></a>(sql_ctx, func)</p></td>
<td><p>The Python implementation of the Java interface ‘ForeachBatchFunction’.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.html#pyspark.sql.streaming.StreamingQuery" title="pyspark.sql.streaming.StreamingQuery"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery</span></code></a>(jsq)</p></td>
<td><p>A handle to a query that is executing continuously in the background as new data arrives.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQueryException.html#pyspark.sql.streaming.StreamingQueryException" title="pyspark.sql.streaming.StreamingQueryException"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQueryException</span></code></a>(desc, stackTrace[, …])</p></td>
<td><p>Exception that stopped a <a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.html#pyspark.sql.streaming.StreamingQuery" title="pyspark.sql.streaming.StreamingQuery"><code class="xref py py-class docutils literal notranslate"><span class="pre">StreamingQuery</span></code></a>.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQueryManager.html#pyspark.sql.streaming.StreamingQueryManager" title="pyspark.sql.streaming.StreamingQueryManager"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQueryManager</span></code></a>(jsqm)</p></td>
<td><p>A class to manage all the active <a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.html#pyspark.sql.streaming.StreamingQuery" title="pyspark.sql.streaming.StreamingQuery"><code class="xref py py-class docutils literal notranslate"><span class="pre">StreamingQuery</span></code></a> instances.</p></td>
</tr>
</tbody>
</table>
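<p>A minimal sketch of how these classes relate, assuming a running SparkSession and a socket source on <code>localhost:9999</code> (both placeholders chosen for illustration): <code>spark.readStream</code> gives a DataStreamReader, <code>DataFrame.writeStream</code> gives a DataStreamWriter, <code>start()</code> returns a StreamingQuery, and <code>spark.streams</code> is the session’s StreamingQueryManager.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre>from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("structured-streaming-sketch").getOrCreate()

# DataStreamReader: build an unbounded DataFrame from a source.
# The socket host and port are illustrative placeholders.
lines = (
    spark.readStream
    .format("socket")
    .option("host", "localhost")
    .option("port", 9999)
    .load()
)

# DataStreamWriter: configure the sink; start() returns a StreamingQuery.
query = (
    lines.writeStream
    .format("console")
    .outputMode("append")
    .start()
)

# StreamingQueryManager: spark.streams tracks every active query on this session.
print(spark.streams.active)

query.stop()
</pre></div></div>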
</div>
<div class="section" id="input-and-output">
<h2>Input and Output<a class="headerlink" href="#input-and-output" title="Permalink to this headline">¶</a></h2>
<table class="longtable table autosummary">
<colgroup>
<col style="width: 10%" />
<col style="width: 90%" />
</colgroup>
<tbody>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.csv.html#pyspark.sql.streaming.DataStreamReader.csv" title="pyspark.sql.streaming.DataStreamReader.csv"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.csv</span></code></a>(path[, schema, sep, …])</p></td>
<td><p>Loads a CSV file stream and returns the result as a <code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code>.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.format.html#pyspark.sql.streaming.DataStreamReader.format" title="pyspark.sql.streaming.DataStreamReader.format"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.format</span></code></a>(source)</p></td>
<td><p>Specifies the input data source format.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.json.html#pyspark.sql.streaming.DataStreamReader.json" title="pyspark.sql.streaming.DataStreamReader.json"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.json</span></code></a>(path[, schema, …])</p></td>
<td><p>Loads a JSON file stream and returns the results as a <code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code>.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.load.html#pyspark.sql.streaming.DataStreamReader.load" title="pyspark.sql.streaming.DataStreamReader.load"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.load</span></code></a>([path, format, schema])</p></td>
<td><p>Loads a data stream from a data source and returns it as a <a class="reference internal" href="api/pyspark.sql.DataFrame.html#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code></a>.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.option.html#pyspark.sql.streaming.DataStreamReader.option" title="pyspark.sql.streaming.DataStreamReader.option"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.option</span></code></a>(key, value)</p></td>
<td><p>Adds an input option for the underlying data source.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.options.html#pyspark.sql.streaming.DataStreamReader.options" title="pyspark.sql.streaming.DataStreamReader.options"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.options</span></code></a>(**options)</p></td>
<td><p>Adds input options for the underlying data source.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.orc.html#pyspark.sql.streaming.DataStreamReader.orc" title="pyspark.sql.streaming.DataStreamReader.orc"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.orc</span></code></a>(path[, mergeSchema, …])</p></td>
<td><p>Loads an ORC file stream, returning the result as a <code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code>.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.parquet.html#pyspark.sql.streaming.DataStreamReader.parquet" title="pyspark.sql.streaming.DataStreamReader.parquet"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.parquet</span></code></a>(path[, …])</p></td>
<td><p>Loads a Parquet file stream, returning the result as a <code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code>.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.schema.html#pyspark.sql.streaming.DataStreamReader.schema" title="pyspark.sql.streaming.DataStreamReader.schema"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.schema</span></code></a>(schema)</p></td>
<td><p>Specifies the input schema.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.table.html#pyspark.sql.streaming.DataStreamReader.table" title="pyspark.sql.streaming.DataStreamReader.table"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.table</span></code></a>(tableName)</p></td>
<td><p>Defines a streaming DataFrame on a table.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamReader.text.html#pyspark.sql.streaming.DataStreamReader.text" title="pyspark.sql.streaming.DataStreamReader.text"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamReader.text</span></code></a>(path[, wholetext, …])</p></td>
<td><p>Loads a text file stream and returns a <code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code> whose schema starts with a string column named “value”, followed by partitioned columns if there are any.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.foreach.html#pyspark.sql.streaming.DataStreamWriter.foreach" title="pyspark.sql.streaming.DataStreamWriter.foreach"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.foreach</span></code></a>(f)</p></td>
<td><p>Sets the output of the streaming query to be processed using the provided writer <code class="docutils literal notranslate"><span class="pre">f</span></code>.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.foreachBatch.html#pyspark.sql.streaming.DataStreamWriter.foreachBatch" title="pyspark.sql.streaming.DataStreamWriter.foreachBatch"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.foreachBatch</span></code></a>(func)</p></td>
<td><p>Sets the output of the streaming query to be processed using the provided function.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.format.html#pyspark.sql.streaming.DataStreamWriter.format" title="pyspark.sql.streaming.DataStreamWriter.format"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.format</span></code></a>(source)</p></td>
<td><p>Specifies the underlying output data source.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.option.html#pyspark.sql.streaming.DataStreamWriter.option" title="pyspark.sql.streaming.DataStreamWriter.option"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.option</span></code></a>(key, value)</p></td>
<td><p>Adds an output option for the underlying data source.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.options.html#pyspark.sql.streaming.DataStreamWriter.options" title="pyspark.sql.streaming.DataStreamWriter.options"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.options</span></code></a>(**options)</p></td>
<td><p>Adds output options for the underlying data source.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.outputMode.html#pyspark.sql.streaming.DataStreamWriter.outputMode" title="pyspark.sql.streaming.DataStreamWriter.outputMode"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.outputMode</span></code></a>(outputMode)</p></td>
<td><p>Specifies how data of a streaming DataFrame/Dataset is written to a streaming sink.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.partitionBy.html#pyspark.sql.streaming.DataStreamWriter.partitionBy" title="pyspark.sql.streaming.DataStreamWriter.partitionBy"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.partitionBy</span></code></a>(*cols)</p></td>
<td><p>Partitions the output by the given columns on the file system.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.queryName.html#pyspark.sql.streaming.DataStreamWriter.queryName" title="pyspark.sql.streaming.DataStreamWriter.queryName"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.queryName</span></code></a>(queryName)</p></td>
<td><p>Specifies the name of the <a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.html#pyspark.sql.streaming.StreamingQuery" title="pyspark.sql.streaming.StreamingQuery"><code class="xref py py-class docutils literal notranslate"><span class="pre">StreamingQuery</span></code></a> that can be started with <code class="xref py py-func docutils literal notranslate"><span class="pre">start()</span></code>.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.start.html#pyspark.sql.streaming.DataStreamWriter.start" title="pyspark.sql.streaming.DataStreamWriter.start"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.start</span></code></a>([path, format, …])</p></td>
<td><p>Streams the contents of the <code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code> to a data source.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.toTable.html#pyspark.sql.streaming.DataStreamWriter.toTable" title="pyspark.sql.streaming.DataStreamWriter.toTable"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.toTable</span></code></a>(tableName[, …])</p></td>
<td><p>Starts the execution of the streaming query, which will continually output results to the given table as new data arrives.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.DataStreamWriter.trigger.html#pyspark.sql.streaming.DataStreamWriter.trigger" title="pyspark.sql.streaming.DataStreamWriter.trigger"><code class="xref py py-obj docutils literal notranslate"><span class="pre">DataStreamWriter.trigger</span></code></a>(*[, …])</p></td>
<td><p>Sets the trigger for the stream query.</p></td>
</tr>
</tbody>
</table>
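<p>The reader and writer methods above chain fluently. The sketch below wires a CSV directory source to a Parquet sink; the paths, schema, trigger interval, and query name are illustrative assumptions rather than values required by the API.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre>from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StructField, StringType, LongType

spark = SparkSession.builder.appName("io-sketch").getOrCreate()

# DataStreamReader: declare the schema up front, then watch a directory
# of CSV files as an unbounded stream. The path is a placeholder.
schema = StructType([
    StructField("name", StringType()),
    StructField("count", LongType()),
])
events = (
    spark.readStream
    .schema(schema)
    .option("header", "true")
    .csv("/tmp/stream-input")          # hypothetical input directory
)

# DataStreamWriter: pick an output mode, a trigger interval, and a sink.
query = (
    events.writeStream
    .outputMode("append")
    .trigger(processingTime="10 seconds")
    .option("checkpointLocation", "/tmp/stream-checkpoint")  # hypothetical path
    .format("parquet")
    .option("path", "/tmp/stream-output")                    # hypothetical path
    .queryName("csv_to_parquet")
    .start()
)
</pre></div></div>
<p>In general, file sources require an explicit schema, and the checkpoint location is what allows a restarted query to resume where the previous run left off.</p>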
</div>
<div class="section" id="query-management">
<h2>Query Management<a class="headerlink" href="#query-management" title="Permalink to this headline">¶</a></h2>
<table class="longtable table autosummary">
<colgroup>
<col style="width: 10%" />
<col style="width: 90%" />
</colgroup>
<tbody>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.awaitTermination.html#pyspark.sql.streaming.StreamingQuery.awaitTermination" title="pyspark.sql.streaming.StreamingQuery.awaitTermination"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.awaitTermination</span></code></a>([timeout])</p></td>
<td><p>Waits for the termination of <cite>this</cite> query, either by <code class="xref py py-func docutils literal notranslate"><span class="pre">query.stop()</span></code> or by an exception.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.exception.html#pyspark.sql.streaming.StreamingQuery.exception" title="pyspark.sql.streaming.StreamingQuery.exception"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.exception</span></code></a>()</p></td>
<td><p>Returns the <a class="reference internal" href="api/pyspark.sql.streaming.StreamingQueryException.html#pyspark.sql.streaming.StreamingQueryException" title="pyspark.sql.streaming.StreamingQueryException"><code class="xref py py-class docutils literal notranslate"><span class="pre">StreamingQueryException</span></code></a> if the query was terminated by an exception, or None (new in version 2.1.0).</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.explain.html#pyspark.sql.streaming.StreamingQuery.explain" title="pyspark.sql.streaming.StreamingQuery.explain"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.explain</span></code></a>([extended])</p></td>
<td><p>Prints the (logical and physical) plans to the console for debugging purposes.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.id.html#pyspark.sql.streaming.StreamingQuery.id" title="pyspark.sql.streaming.StreamingQuery.id"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.id</span></code></a></p></td>
<td><p>Returns the unique id of this query that persists across restarts from checkpoint data.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.isActive.html#pyspark.sql.streaming.StreamingQuery.isActive" title="pyspark.sql.streaming.StreamingQuery.isActive"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.isActive</span></code></a></p></td>
<td><p>Whether this streaming query is currently active or not.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.lastProgress.html#pyspark.sql.streaming.StreamingQuery.lastProgress" title="pyspark.sql.streaming.StreamingQuery.lastProgress"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.lastProgress</span></code></a></p></td>
<td><p>Returns the most recent <code class="xref py py-class docutils literal notranslate"><span class="pre">StreamingQueryProgress</span></code> update of this streaming query, or None if there were no progress updates.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.name.html#pyspark.sql.streaming.StreamingQuery.name" title="pyspark.sql.streaming.StreamingQuery.name"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.name</span></code></a></p></td>
<td><p>Returns the user-specified name of the query, or None if not specified.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.processAllAvailable.html#pyspark.sql.streaming.StreamingQuery.processAllAvailable" title="pyspark.sql.streaming.StreamingQuery.processAllAvailable"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.processAllAvailable</span></code></a>()</p></td>
<td><p>Blocks until all available data in the source has been processed and committed to the sink.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.recentProgress.html#pyspark.sql.streaming.StreamingQuery.recentProgress" title="pyspark.sql.streaming.StreamingQuery.recentProgress"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.recentProgress</span></code></a></p></td>
<td><p>Returns an array of the most recent <code class="xref py py-class docutils literal notranslate"><span class="pre">StreamingQueryProgress</span></code> updates for this query.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.runId.html#pyspark.sql.streaming.StreamingQuery.runId" title="pyspark.sql.streaming.StreamingQuery.runId"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.runId</span></code></a></p></td>
<td><p>Returns the unique id of this query that does not persist across restarts.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.status.html#pyspark.sql.streaming.StreamingQuery.status" title="pyspark.sql.streaming.StreamingQuery.status"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.status</span></code></a></p></td>
<td><p>Returns the current status of the query.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQuery.stop.html#pyspark.sql.streaming.StreamingQuery.stop" title="pyspark.sql.streaming.StreamingQuery.stop"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQuery.stop</span></code></a>()</p></td>
<td><p>Stops this streaming query.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQueryManager.active.html#pyspark.sql.streaming.StreamingQueryManager.active" title="pyspark.sql.streaming.StreamingQueryManager.active"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQueryManager.active</span></code></a></p></td>
<td><p>Returns a list of active queries associated with this SQLContext.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQueryManager.awaitAnyTermination.html#pyspark.sql.streaming.StreamingQueryManager.awaitAnyTermination" title="pyspark.sql.streaming.StreamingQueryManager.awaitAnyTermination"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQueryManager.awaitAnyTermination</span></code></a>([…])</p></td>
<td><p>Waits until any of the queries on the associated SQLContext has terminated since the creation of the context, or since <code class="xref py py-func docutils literal notranslate"><span class="pre">resetTerminated()</span></code> was called.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQueryManager.get.html#pyspark.sql.streaming.StreamingQueryManager.get" title="pyspark.sql.streaming.StreamingQueryManager.get"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQueryManager.get</span></code></a>(id)</p></td>
<td><p>Returns an active query from this SQLContext, or throws an exception if an active query with this id does not exist.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.streaming.StreamingQueryManager.resetTerminated.html#pyspark.sql.streaming.StreamingQueryManager.resetTerminated" title="pyspark.sql.streaming.StreamingQueryManager.resetTerminated"><code class="xref py py-obj docutils literal notranslate"><span class="pre">StreamingQueryManager.resetTerminated</span></code></a>()</p></td>
<td><p>Forgets about past terminated queries so that <code class="xref py py-func docutils literal notranslate"><span class="pre">awaitAnyTermination()</span></code> can be used again to wait for new terminations.</p></td>
</tr>
</tbody>
</table>
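<p>A short sketch of the query-management calls above, assuming <code>query</code> is a StreamingQuery returned by <code>DataStreamWriter.start()</code> and <code>spark</code> is the active SparkSession:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre># Identity and state of a single query.
print(query.id)          # stable across restarts from the same checkpoint
print(query.runId)       # unique to this particular run
print(query.name)        # user-specified name, or None
print(query.isActive)
print(query.status)      # dict with message, isDataAvailable, isTriggerActive

# Progress reporting.
print(query.lastProgress)    # most recent progress update, or None
print(query.recentProgress)  # list of recent progress updates

# Block until a condition is met.
query.processAllAvailable()          # drain everything currently available
query.awaitTermination(timeout=5)    # True if the query terminated within 5 seconds

# Manager-level operations over all queries on this session.
manager = spark.streams
print([q.name for q in manager.active])
manager.resetTerminated()            # forget past terminations
manager.awaitAnyTermination(timeout=5)

# Stop the query and surface any error that terminated it.
query.stop()
print(query.exception())             # StreamingQueryException, or None
</pre></div></div>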
</div>
</div>
    </main>
  </body>
</html>