<!-- blob: 0cace56c85b230da169dc94ff93c325c40db7a80 [file] [log] [blame] -->
<!DOCTYPE html>
<!-- Sphinx-generated page head: theme styles, font preloads, doc scripts, MathJax, and rel links -->
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Spark Session &#8212; PySpark 3.3.3 documentation</title>
<link rel="stylesheet" href="../../_static/css/index.73d71520a4ca3b99cfee5594769eaaae.css">
<link rel="stylesheet"
href="../../_static/vendor/fontawesome/5.13.0/css/all.min.css">
<!-- Font preloads: crossorigin is required on as="font" or the preload is wasted -->
<link rel="preload" as="font" type="font/woff2" crossorigin
href="../../_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.woff2">
<link rel="preload" as="font" type="font/woff2" crossorigin
href="../../_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.woff2">
<link rel="stylesheet"
href="../../_static/vendor/open-sans_all/1.44.1/index.css">
<link rel="stylesheet"
href="../../_static/vendor/lato_latin-ext/1.44.1/index.css">
<link rel="stylesheet" href="../../_static/basic.css">
<link rel="stylesheet" href="../../_static/pygments.css">
<link rel="stylesheet" href="../../_static/css/pyspark.css">
<!-- Preload for the theme bundle loaded at the end of <body> -->
<link rel="preload" as="script" href="../../_static/js/index.3da636dd464baa7582d2.js">
<script id="documentation_options" data-url_root="../../" src="../../_static/documentation_options.js"></script>
<script src="../../_static/jquery.js"></script>
<script src="../../_static/underscore.js"></script>
<script src="../../_static/doctools.js"></script>
<script src="../../_static/language_data.js"></script>
<script src="../../_static/copybutton.js"></script>
<!-- Third-party CDN scripts carry SRI integrity + crossorigin -->
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/x-mathjax-config">MathJax.Hub.Config({"tex2jax": {"inlineMath": [["$", "$"], ["\\(", "\\)"]], "processEscapes": true, "ignoreClass": "document", "processClass": "math|output_area"}})</script>
<link rel="search" title="Search" href="../../search.html">
<link rel="next" title="pyspark.sql.SparkSession.builder.appName" href="api/pyspark.sql.SparkSession.builder.appName.html">
<link rel="prev" title="pyspark.sql.DataFrameWriter" href="api/pyspark.sql.DataFrameWriter.html">
<meta name="docsearch:language" content="en">
</head>
<body data-spy="scroll" data-target="#bd-toc-nav" data-offset="80">
<!-- Top navigation bar: primary site sections; collapses behind the toggler on small screens.
     aria-label distinguishes this landmark from the sidebar and page-TOC <nav> elements. -->
<nav class="navbar navbar-light navbar-expand-lg bg-light fixed-top bd-navbar" id="navbar-main" aria-label="Primary">
<div class="container-xl">
<a class="navbar-brand" href="../../index.html">
<img src="../../_static/spark-logo-reverse.png" class="logo" alt="PySpark documentation home">
</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbar-menu" aria-controls="navbar-menu" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div id="navbar-menu" class="col-lg-9 collapse navbar-collapse">
<ul id="navbar-main-elements" class="navbar-nav mr-auto">
<li class="nav-item">
<a class="nav-link" href="../../getting_started/index.html">Getting Started</a>
</li>
<li class="nav-item">
<a class="nav-link" href="../../user_guide/index.html">User Guide</a>
</li>
<li class="nav-item active">
<a class="nav-link" href="../index.html">API Reference</a>
</li>
<li class="nav-item">
<a class="nav-link" href="../../development/index.html">Development</a>
</li>
<li class="nav-item">
<a class="nav-link" href="../../migration_guide/index.html">Migration Guide</a>
</li>
</ul>
<!-- Placeholder list kept: theme JS/CSS may expect it -->
<ul class="navbar-nav">
</ul>
</div>
</div>
</nav>
<div class="container-xl">
<div class="row">
<!-- Left sidebar: docs search box plus the Spark SQL section navigation -->
<div class="col-12 col-md-3 bd-sidebar"><form class="bd-search d-flex align-items-center" action="../../search.html" method="get">
<i class="icon fas fa-search" aria-hidden="true"></i>
<input type="search" class="form-control" name="q" id="search-input" placeholder="Search the docs ..." aria-label="Search the docs ..." autocomplete="off">
</form>
<nav class="bd-links" id="bd-docs-nav" aria-label="Main navigation">
<div class="bd-toc-item active">
<ul class="nav bd-sidenav">
<li class="active">
<a href="index.html">Spark SQL</a>
<ul>
<li>
<a href="core_classes.html">Core Classes</a>
</li>
<li class="active">
<!-- Empty href resolves to the current page; aria-current announces it to AT -->
<a href="" aria-current="page">Spark Session</a>
</li>
<li>
<a href="configuration.html">Configuration</a>
</li>
<li>
<a href="io.html">Input/Output</a>
</li>
<li>
<a href="dataframe.html">DataFrame</a>
</li>
<li>
<a href="column.html">Column</a>
</li>
<li>
<a href="data_types.html">Data Types</a>
</li>
<li>
<a href="row.html">Row</a>
</li>
<li>
<a href="functions.html">Functions</a>
</li>
<li>
<a href="window.html">Window</a>
</li>
<li>
<a href="grouping.html">Grouping</a>
</li>
<li>
<a href="catalog.html">Catalog</a>
</li>
<li>
<a href="observation.html">Observation</a>
</li>
<li>
<a href="avro.html">Avro</a>
</li>
</ul>
</li>
<li>
<a href="../pyspark.pandas/index.html">Pandas API on Spark</a>
</li>
<li>
<a href="../pyspark.ss/index.html">Structured Streaming</a>
</li>
<li>
<a href="../pyspark.ml.html">MLlib (DataFrame-based)</a>
</li>
<li>
<a href="../pyspark.streaming.html">Spark Streaming</a>
</li>
<li>
<a href="../pyspark.mllib.html">MLlib (RDD-based)</a>
</li>
<li>
<a href="../pyspark.html">Spark Core</a>
</li>
<li>
<a href="../pyspark.resource.html">Resource Management</a>
</li>
</ul>
</div>
</nav>
</div>
<!-- Right sidebar: in-page table of contents (empty here; presumably populated per-heading by the theme JS — confirm) -->
<div class="d-none d-xl-block col-xl-2 bd-toc">
<nav id="bd-toc-nav" aria-label="Page table of contents">
<ul class="nav section-nav flex-column">
</ul>
</nav>
</div>
<main class="col-12 col-md-9 col-xl-7 py-md-5 pl-md-5 pr-md-4 bd-content" role="main">
<div>
<!-- Main content: Sphinx-autosummary listing of SparkSession builder methods, attributes, and session methods -->
<div class="section" id="spark-session">
<h1>Spark Session<a class="headerlink" href="#spark-session" title="Permalink to this headline"></a></h1>
<!-- NOTE(review): this first autosummary table has an empty <tbody> — presumably a generator
     artifact of the .rst source emitting a table with no rows; confirm against the docs source. -->
<table class="longtable table autosummary">
<colgroup>
<col style="width: 10%" />
<col style="width: 90%" />
</colgroup>
<tbody>
</tbody>
</table>
<p>The entry point to programming Spark with the Dataset and DataFrame API.
To create a Spark session, you should use <code class="docutils literal notranslate"><span class="pre">SparkSession.builder</span></code> attribute.
See also <a class="reference internal" href="api/pyspark.sql.SparkSession.html#pyspark.sql.SparkSession" title="pyspark.sql.SparkSession"><code class="xref py py-class docutils literal notranslate"><span class="pre">SparkSession</span></code></a>.</p>
<!-- API summary table: one row per API; first column links to the per-API page, second is the
     first line of its docstring. Row striping via row-odd/row-even classes. -->
<table class="longtable table autosummary">
<colgroup>
<col style="width: 10%" />
<col style="width: 90%" />
</colgroup>
<tbody>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.builder.appName.html#pyspark.sql.SparkSession.builder.appName" title="pyspark.sql.SparkSession.builder.appName"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.builder.appName</span></code></a>(name)</p></td>
<td><p>Sets a name for the application, which will be shown in the Spark web UI.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.builder.config.html#pyspark.sql.SparkSession.builder.config" title="pyspark.sql.SparkSession.builder.config"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.builder.config</span></code></a>([key, value, conf])</p></td>
<td><p>Sets a config option.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.builder.enableHiveSupport.html#pyspark.sql.SparkSession.builder.enableHiveSupport" title="pyspark.sql.SparkSession.builder.enableHiveSupport"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.builder.enableHiveSupport</span></code></a>()</p></td>
<td><p>Enables Hive support, including connectivity to a persistent Hive metastore, support for Hive SerDes, and Hive user-defined functions.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.builder.getOrCreate.html#pyspark.sql.SparkSession.builder.getOrCreate" title="pyspark.sql.SparkSession.builder.getOrCreate"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.builder.getOrCreate</span></code></a>()</p></td>
<td><p>Gets an existing <a class="reference internal" href="api/pyspark.sql.SparkSession.html#pyspark.sql.SparkSession" title="pyspark.sql.SparkSession"><code class="xref py py-class docutils literal notranslate"><span class="pre">SparkSession</span></code></a> or, if there is no existing one, creates a new one based on the options set in this builder.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.builder.master.html#pyspark.sql.SparkSession.builder.master" title="pyspark.sql.SparkSession.builder.master"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.builder.master</span></code></a>(master)</p></td>
<td><p>Sets the Spark master URL to connect to, such as “local” to run locally, “local[4]” to run locally with 4 cores, or “spark://master:7077” to run on a Spark standalone cluster.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.catalog.html#pyspark.sql.SparkSession.catalog" title="pyspark.sql.SparkSession.catalog"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.catalog</span></code></a></p></td>
<td><p>Interface through which the user may create, drop, alter or query underlying databases, tables, functions, etc.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.conf.html#pyspark.sql.SparkSession.conf" title="pyspark.sql.SparkSession.conf"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.conf</span></code></a></p></td>
<td><p>Runtime configuration interface for Spark.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.createDataFrame.html#pyspark.sql.SparkSession.createDataFrame" title="pyspark.sql.SparkSession.createDataFrame"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.createDataFrame</span></code></a>(data[, schema, …])</p></td>
<td><p>Creates a <a class="reference internal" href="api/pyspark.sql.DataFrame.html#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code></a> from an <code class="xref py py-class docutils literal notranslate"><span class="pre">RDD</span></code>, a list or a <code class="xref py py-class docutils literal notranslate"><span class="pre">pandas.DataFrame</span></code>.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.getActiveSession.html#pyspark.sql.SparkSession.getActiveSession" title="pyspark.sql.SparkSession.getActiveSession"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.getActiveSession</span></code></a>()</p></td>
<td><p>Returns the active <a class="reference internal" href="api/pyspark.sql.SparkSession.html#pyspark.sql.SparkSession" title="pyspark.sql.SparkSession"><code class="xref py py-class docutils literal notranslate"><span class="pre">SparkSession</span></code></a> for the current thread, returned by the builder</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.newSession.html#pyspark.sql.SparkSession.newSession" title="pyspark.sql.SparkSession.newSession"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.newSession</span></code></a>()</p></td>
<td><p>Returns a new <a class="reference internal" href="api/pyspark.sql.SparkSession.html#pyspark.sql.SparkSession" title="pyspark.sql.SparkSession"><code class="xref py py-class docutils literal notranslate"><span class="pre">SparkSession</span></code></a> as new session, that has separate SQLConf, registered temporary views and UDFs, but shared <code class="xref py py-class docutils literal notranslate"><span class="pre">SparkContext</span></code> and table cache.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.range.html#pyspark.sql.SparkSession.range" title="pyspark.sql.SparkSession.range"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.range</span></code></a>(start[, end, step, …])</p></td>
<td><p>Create a <a class="reference internal" href="api/pyspark.sql.DataFrame.html#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code></a> with single <a class="reference internal" href="api/pyspark.sql.types.LongType.html#pyspark.sql.types.LongType" title="pyspark.sql.types.LongType"><code class="xref py py-class docutils literal notranslate"><span class="pre">pyspark.sql.types.LongType</span></code></a> column named <code class="docutils literal notranslate"><span class="pre">id</span></code>, containing elements in a range from <code class="docutils literal notranslate"><span class="pre">start</span></code> to <code class="docutils literal notranslate"><span class="pre">end</span></code> (exclusive) with step value <code class="docutils literal notranslate"><span class="pre">step</span></code>.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.read.html#pyspark.sql.SparkSession.read" title="pyspark.sql.SparkSession.read"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.read</span></code></a></p></td>
<td><p>Returns a <a class="reference internal" href="api/pyspark.sql.DataFrameReader.html#pyspark.sql.DataFrameReader" title="pyspark.sql.DataFrameReader"><code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrameReader</span></code></a> that can be used to read data in as a <a class="reference internal" href="api/pyspark.sql.DataFrame.html#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code></a>.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.readStream.html#pyspark.sql.SparkSession.readStream" title="pyspark.sql.SparkSession.readStream"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.readStream</span></code></a></p></td>
<td><p>Returns a <code class="xref py py-class docutils literal notranslate"><span class="pre">DataStreamReader</span></code> that can be used to read data streams as a streaming <a class="reference internal" href="api/pyspark.sql.DataFrame.html#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code></a>.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.sparkContext.html#pyspark.sql.SparkSession.sparkContext" title="pyspark.sql.SparkSession.sparkContext"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.sparkContext</span></code></a></p></td>
<td><p>Returns the underlying <code class="xref py py-class docutils literal notranslate"><span class="pre">SparkContext</span></code>.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.sql.html#pyspark.sql.SparkSession.sql" title="pyspark.sql.SparkSession.sql"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.sql</span></code></a>(sqlQuery, **kwargs)</p></td>
<td><p>Returns a <a class="reference internal" href="api/pyspark.sql.DataFrame.html#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code></a> representing the result of the given query.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.stop.html#pyspark.sql.SparkSession.stop" title="pyspark.sql.SparkSession.stop"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.stop</span></code></a>()</p></td>
<td><p>Stop the underlying <code class="xref py py-class docutils literal notranslate"><span class="pre">SparkContext</span></code>.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.streams.html#pyspark.sql.SparkSession.streams" title="pyspark.sql.SparkSession.streams"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.streams</span></code></a></p></td>
<td><p>Returns a <code class="xref py py-class docutils literal notranslate"><span class="pre">StreamingQueryManager</span></code> that allows managing all the <code class="xref py py-class docutils literal notranslate"><span class="pre">StreamingQuery</span></code> instances active on <cite>this</cite> context.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.table.html#pyspark.sql.SparkSession.table" title="pyspark.sql.SparkSession.table"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.table</span></code></a>(tableName)</p></td>
<td><p>Returns the specified table as a <a class="reference internal" href="api/pyspark.sql.DataFrame.html#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal notranslate"><span class="pre">DataFrame</span></code></a>.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.udf.html#pyspark.sql.SparkSession.udf" title="pyspark.sql.SparkSession.udf"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.udf</span></code></a></p></td>
<td><p>Returns a <code class="xref py py-class docutils literal notranslate"><span class="pre">UDFRegistration</span></code> for UDF registration.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="api/pyspark.sql.SparkSession.version.html#pyspark.sql.SparkSession.version" title="pyspark.sql.SparkSession.version"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SparkSession.version</span></code></a></p></td>
<td><p>The version of Spark on which this application is running.</p></td>
</tr>
</tbody>
</table>
</div>
</div>
<!-- Previous/next page pager links -->
<div class="prev-next-bottom">
<a class="left-prev" id="prev-link" href="api/pyspark.sql.DataFrameWriter.html" title="previous page">pyspark.sql.DataFrameWriter</a>
<a class="right-next" id="next-link" href="api/pyspark.sql.SparkSession.builder.appName.html" title="next page">pyspark.sql.SparkSession.builder.appName</a>
</div>
</main>
</div>
</div>
<!-- Theme bundle (preloaded in <head>) loaded at end of body -->
<script src="../../_static/js/index.3da636dd464baa7582d2.js"></script>
<footer class="footer mt-5 mt-md-0">
<div class="container">
<p>
<!-- NOTE(review): copyright holder name is missing (Sphinx `copyright` config is empty);
     restore the project owner there rather than hand-editing generated output. -->
&copy; Copyright.<br>
Created using <a href="https://sphinx-doc.org/">Sphinx</a> 3.0.4.<br>
</p>
</div>
</footer>
</body>
</html>