<!-- blob: 6599f2ee7957dbbbe827c9419580c5f8599010d9 [file] [log] [blame] -->
<!DOCTYPE html>
<!-- lang is required for assistive tech and hyphenation; "en" matches the docsearch:language meta below -->
<html lang="en">
<head>
<meta charset="utf-8">
<title>DefaultParamsWriter &#8212; PySpark 3.3.3 documentation</title>
<link rel="stylesheet" href="../../_static/css/index.73d71520a4ca3b99cfee5594769eaaae.css">
<link rel="stylesheet"
href="../../_static/vendor/fontawesome/5.13.0/css/all.min.css">
<!-- crossorigin is required on font preloads; without it the preload does not match the font request and is wasted -->
<link rel="preload" as="font" type="font/woff2" crossorigin
href="../../_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.woff2">
<link rel="preload" as="font" type="font/woff2" crossorigin
href="../../_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.woff2">
<link rel="stylesheet"
href="../../_static/vendor/open-sans_all/1.44.1/index.css">
<link rel="stylesheet"
href="../../_static/vendor/lato_latin-ext/1.44.1/index.css">
<!-- type="text/css" is the default for rel="stylesheet" and is omitted -->
<link rel="stylesheet" href="../../_static/basic.css">
<link rel="stylesheet" href="../../_static/pygments.css">
<link rel="stylesheet" href="../../_static/css/pyspark.css">
<link rel="preload" as="script" href="../../_static/js/index.3da636dd464baa7582d2.js">
<script id="documentation_options" data-url_root="../../" src="../../_static/documentation_options.js"></script>
<script src="../../_static/jquery.js"></script>
<script src="../../_static/underscore.js"></script>
<script src="../../_static/doctools.js"></script>
<script src="../../_static/language_data.js"></script>
<script src="../../_static/copybutton.js"></script>
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<!-- async is a boolean attribute; its presence alone means true -->
<script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/x-mathjax-config">MathJax.Hub.Config({"tex2jax": {"inlineMath": [["$", "$"], ["\\(", "\\)"]], "processEscapes": true, "ignoreClass": "document", "processClass": "math|output_area"}})</script>
<link rel="search" title="Search" href="../../search.html">
<link rel="next" title="GeneralMLWriter" href="pyspark.ml.util.GeneralMLWriter.html">
<link rel="prev" title="DefaultParamsWritable" href="pyspark.ml.util.DefaultParamsWritable.html">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="docsearch:language" content="en">
</head>
<body data-spy="scroll" data-target="#bd-toc-nav" data-offset="80">
<!-- Fixed top navigation bar (Bootstrap navbar); collapses behind the toggler button on small screens -->
<nav class="navbar navbar-light navbar-expand-lg bg-light fixed-top bd-navbar" id="navbar-main">
<div class="container-xl">
<a class="navbar-brand" href="../../index.html">
<img src="../../_static/spark-logo-reverse.png" class="logo" alt="logo" />
</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbar-menu" aria-controls="navbar-menu" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<!-- Collapsible menu with the five top-level doc sections; "active" marks the current one (API Reference) -->
<div id="navbar-menu" class="col-lg-9 collapse navbar-collapse">
<ul id="navbar-main-elements" class="navbar-nav mr-auto">
<li class="nav-item ">
<a class="nav-link" href="../../getting_started/index.html">Getting Started</a>
</li>
<li class="nav-item ">
<a class="nav-link" href="../../user_guide/index.html">User Guide</a>
</li>
<li class="nav-item active">
<a class="nav-link" href="../index.html">API Reference</a>
</li>
<li class="nav-item ">
<a class="nav-link" href="../../development/index.html">Development</a>
</li>
<li class="nav-item ">
<a class="nav-link" href="../../migration_guide/index.html">Migration Guide</a>
</li>
</ul>
<ul class="navbar-nav">
</ul>
</div>
</div>
</nav>
<div class="container-xl">
<div class="row">
<!-- Left sidebar: docs search form plus top-level package navigation; "active" marks this page's package -->
<div class="col-12 col-md-3 bd-sidebar"><form class="bd-search d-flex align-items-center" action="../../search.html" method="get">
<i class="icon fas fa-search"></i>
<input type="search" class="form-control" name="q" id="search-input" placeholder="Search the docs ..." aria-label="Search the docs ..." autocomplete="off" >
</form>
<nav class="bd-links" id="bd-docs-nav" aria-label="Main navigation">
<div class="bd-toc-item active">
<ul class="nav bd-sidenav">
<li class="">
<a href="../pyspark.sql/index.html">Spark SQL</a>
</li>
<li class="">
<a href="../pyspark.pandas/index.html">Pandas API on Spark</a>
</li>
<li class="">
<a href="../pyspark.ss/index.html">Structured Streaming</a>
</li>
<li class="active">
<a href="../pyspark.ml.html">MLlib (DataFrame-based)</a>
</li>
<li class="">
<a href="../pyspark.streaming.html">Spark Streaming</a>
</li>
<li class="">
<a href="../pyspark.mllib.html">MLlib (RDD-based)</a>
</li>
<li class="">
<a href="../pyspark.html">Spark Core</a>
</li>
<li class="">
<a href="../pyspark.resource.html">Resource Management</a>
</li>
</ul>
</nav>
</div>
<!-- Right-hand in-page table of contents (scrollspy target; empty for this page) -->
<div class="d-none d-xl-block col-xl-2 bd-toc">
<nav id="bd-toc-nav">
<ul class="nav section-nav flex-column">
</ul>
</nav>
</div>
<!-- Autogenerated (Sphinx autosummary) API reference body for pyspark.ml.util.DefaultParamsWriter -->
<main class="col-12 col-md-9 col-xl-7 py-md-5 pl-md-5 pr-md-4 bd-content" role="main">
<div>
<div class="section" id="defaultparamswriter">
<h1>DefaultParamsWriter<a class="headerlink" href="#defaultparamswriter" title="Permalink to this headline"></a></h1>
<dl class="py class">
<dt id="pyspark.ml.util.DefaultParamsWriter">
<em class="property">class </em><code class="sig-prename descclassname">pyspark.ml.util.</code><code class="sig-name descname">DefaultParamsWriter</code><span class="sig-paren">(</span><em class="sig-param"><span class="n">instance</span><span class="p">:</span> <span class="n">Params</span></em><span class="sig-paren">)</span><a class="reference internal" href="../../_modules/pyspark/ml/util.html#DefaultParamsWriter"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.util.DefaultParamsWriter" title="Permalink to this definition"></a></dt>
<dd><p>Specialization of <a class="reference internal" href="pyspark.ml.util.MLWriter.html#pyspark.ml.util.MLWriter" title="pyspark.ml.util.MLWriter"><code class="xref py py-class docutils literal notranslate"><span class="pre">MLWriter</span></code></a> for <code class="xref py py-class docutils literal notranslate"><span class="pre">Params</span></code> types</p>
<p>Class for writing Estimators and Transformers whose parameters are JSON-serializable.</p>
<div class="versionadded">
<p><span class="versionmodified added">New in version 2.3.0.</span></p>
</div>
<!-- Summary table of methods -->
<p class="rubric">Methods</p>
<table class="longtable table autosummary">
<colgroup>
<col style="width: 10%" />
<col style="width: 90%" />
</colgroup>
<tbody>
<tr class="row-odd"><td><p><a class="reference internal" href="#pyspark.ml.util.DefaultParamsWriter.extractJsonParams" title="pyspark.ml.util.DefaultParamsWriter.extractJsonParams"><code class="xref py py-obj docutils literal notranslate"><span class="pre">extractJsonParams</span></code></a>(instance, skipParams)</p></td>
<td><p></p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="#pyspark.ml.util.DefaultParamsWriter.option" title="pyspark.ml.util.DefaultParamsWriter.option"><code class="xref py py-obj docutils literal notranslate"><span class="pre">option</span></code></a>(key, value)</p></td>
<td><p>Adds an option to the underlying MLWriter.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="#pyspark.ml.util.DefaultParamsWriter.overwrite" title="pyspark.ml.util.DefaultParamsWriter.overwrite"><code class="xref py py-obj docutils literal notranslate"><span class="pre">overwrite</span></code></a>()</p></td>
<td><p>Overwrites if the output path already exists.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="#pyspark.ml.util.DefaultParamsWriter.save" title="pyspark.ml.util.DefaultParamsWriter.save"><code class="xref py py-obj docutils literal notranslate"><span class="pre">save</span></code></a>(path)</p></td>
<td><p>Save the ML instance to the input path.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="#pyspark.ml.util.DefaultParamsWriter.saveImpl" title="pyspark.ml.util.DefaultParamsWriter.saveImpl"><code class="xref py py-obj docutils literal notranslate"><span class="pre">saveImpl</span></code></a>(path)</p></td>
<td><p>save() handles overwriting and then calls this method.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="#pyspark.ml.util.DefaultParamsWriter.saveMetadata" title="pyspark.ml.util.DefaultParamsWriter.saveMetadata"><code class="xref py py-obj docutils literal notranslate"><span class="pre">saveMetadata</span></code></a>(instance, path, sc[, …])</p></td>
<td><p>Saves metadata + Params to: path + “/metadata”</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="#pyspark.ml.util.DefaultParamsWriter.session" title="pyspark.ml.util.DefaultParamsWriter.session"><code class="xref py py-obj docutils literal notranslate"><span class="pre">session</span></code></a>(sparkSession)</p></td>
<td><p>Sets the Spark Session to use for saving/loading.</p></td>
</tr>
</tbody>
</table>
<!-- Summary table of attributes -->
<p class="rubric">Attributes</p>
<table class="longtable table autosummary">
<colgroup>
<col style="width: 10%" />
<col style="width: 90%" />
</colgroup>
<tbody>
<tr class="row-odd"><td><p><a class="reference internal" href="#pyspark.ml.util.DefaultParamsWriter.sc" title="pyspark.ml.util.DefaultParamsWriter.sc"><code class="xref py py-obj docutils literal notranslate"><span class="pre">sc</span></code></a></p></td>
<td><p>Returns the underlying <cite>SparkContext</cite>.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="#pyspark.ml.util.DefaultParamsWriter.sparkSession" title="pyspark.ml.util.DefaultParamsWriter.sparkSession"><code class="xref py py-obj docutils literal notranslate"><span class="pre">sparkSession</span></code></a></p></td>
<td><p>Returns the user-specified Spark Session or the default.</p></td>
</tr>
</tbody>
</table>
<!-- Full per-method documentation (anchors match the summary-table links above) -->
<p class="rubric">Methods Documentation</p>
<dl class="py method">
<dt id="pyspark.ml.util.DefaultParamsWriter.extractJsonParams">
<em class="property">static </em><code class="sig-name descname">extractJsonParams</code><span class="sig-paren">(</span><em class="sig-param"><span class="n">instance</span><span class="p">:</span> <span class="n">Params</span></em>, <em class="sig-param"><span class="n">skipParams</span><span class="p">:</span> <span class="n">Sequence<span class="p">[</span>str<span class="p">]</span></span></em><span class="sig-paren">)</span> &#x2192; Dict<span class="p">[</span>str<span class="p">, </span>Any<span class="p">]</span><a class="reference internal" href="../../_modules/pyspark/ml/util.html#DefaultParamsWriter.extractJsonParams"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.util.DefaultParamsWriter.extractJsonParams" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="py method">
<dt id="pyspark.ml.util.DefaultParamsWriter.option">
<code class="sig-name descname">option</code><span class="sig-paren">(</span><em class="sig-param"><span class="n">key</span><span class="p">:</span> <span class="n">str</span></em>, <em class="sig-param"><span class="n">value</span><span class="p">:</span> <span class="n">Any</span></em><span class="sig-paren">)</span> &#x2192; <a class="reference internal" href="pyspark.ml.util.MLWriter.html#pyspark.ml.util.MLWriter" title="pyspark.ml.util.MLWriter">pyspark.ml.util.MLWriter</a><a class="headerlink" href="#pyspark.ml.util.DefaultParamsWriter.option" title="Permalink to this definition"></a></dt>
<dd><p>Adds an option to the underlying MLWriter. See the documentation for the specific model’s
writer for possible options. The option name (key) is case-insensitive.</p>
</dd></dl>
<dl class="py method">
<dt id="pyspark.ml.util.DefaultParamsWriter.overwrite">
<code class="sig-name descname">overwrite</code><span class="sig-paren">(</span><span class="sig-paren">)</span> &#x2192; <a class="reference internal" href="pyspark.ml.util.MLWriter.html#pyspark.ml.util.MLWriter" title="pyspark.ml.util.MLWriter">pyspark.ml.util.MLWriter</a><a class="headerlink" href="#pyspark.ml.util.DefaultParamsWriter.overwrite" title="Permalink to this definition"></a></dt>
<dd><p>Overwrites if the output path already exists.</p>
</dd></dl>
<dl class="py method">
<dt id="pyspark.ml.util.DefaultParamsWriter.save">
<code class="sig-name descname">save</code><span class="sig-paren">(</span><em class="sig-param"><span class="n">path</span><span class="p">:</span> <span class="n">str</span></em><span class="sig-paren">)</span> &#x2192; None<a class="headerlink" href="#pyspark.ml.util.DefaultParamsWriter.save" title="Permalink to this definition"></a></dt>
<dd><p>Save the ML instance to the input path.</p>
</dd></dl>
<dl class="py method">
<dt id="pyspark.ml.util.DefaultParamsWriter.saveImpl">
<code class="sig-name descname">saveImpl</code><span class="sig-paren">(</span><em class="sig-param"><span class="n">path</span><span class="p">:</span> <span class="n">str</span></em><span class="sig-paren">)</span> &#x2192; None<a class="reference internal" href="../../_modules/pyspark/ml/util.html#DefaultParamsWriter.saveImpl"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.util.DefaultParamsWriter.saveImpl" title="Permalink to this definition"></a></dt>
<dd><p>save() handles overwriting and then calls this method. Subclasses should override this
method to implement the actual saving of the instance.</p>
</dd></dl>
<dl class="py method">
<dt id="pyspark.ml.util.DefaultParamsWriter.saveMetadata">
<em class="property">static </em><code class="sig-name descname">saveMetadata</code><span class="sig-paren">(</span><em class="sig-param"><span class="n">instance</span><span class="p">:</span> <span class="n">Params</span></em>, <em class="sig-param"><span class="n">path</span><span class="p">:</span> <span class="n">str</span></em>, <em class="sig-param"><span class="n">sc</span><span class="p">:</span> <span class="n">pyspark.context.SparkContext</span></em>, <em class="sig-param"><span class="n">extraMetadata</span><span class="p">:</span> <span class="n">Optional<span class="p">[</span>Dict<span class="p">[</span>str<span class="p">, </span>Any<span class="p">]</span><span class="p">]</span></span> <span class="o">=</span> <span class="default_value">None</span></em>, <em class="sig-param"><span class="n">paramMap</span><span class="p">:</span> <span class="n">Optional<span class="p">[</span>Dict<span class="p">[</span>str<span class="p">, </span>Any<span class="p">]</span><span class="p">]</span></span> <span class="o">=</span> <span class="default_value">None</span></em><span class="sig-paren">)</span> &#x2192; None<a class="reference internal" href="../../_modules/pyspark/ml/util.html#DefaultParamsWriter.saveMetadata"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.util.DefaultParamsWriter.saveMetadata" title="Permalink to this definition"></a></dt>
<dd><p>Saves metadata + Params to: path + “/metadata”</p>
<ul class="simple">
<li><p>class</p></li>
<li><p>timestamp</p></li>
<li><p>sparkVersion</p></li>
<li><p>uid</p></li>
<li><p>paramMap</p></li>
<li><p>defaultParamMap (since 2.4.0)</p></li>
<li><p>(optionally, extra metadata)</p></li>
</ul>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><dl class="simple">
<dt><strong>extraMetadata</strong><span class="classifier">dict, optional</span></dt><dd><p>Extra metadata to be saved at same level as uid, paramMap, etc.</p>
</dd>
<dt><strong>paramMap</strong><span class="classifier">dict, optional</span></dt><dd><p>If given, this is saved in the “paramMap” field.</p>
</dd>
</dl>
</dd>
</dl>
</dd></dl>
<dl class="py method">
<dt id="pyspark.ml.util.DefaultParamsWriter.session">
<code class="sig-name descname">session</code><span class="sig-paren">(</span><em class="sig-param"><span class="n">sparkSession</span><span class="p">:</span> <span class="n">pyspark.sql.session.SparkSession</span></em><span class="sig-paren">)</span> &#x2192; RW<a class="headerlink" href="#pyspark.ml.util.DefaultParamsWriter.session" title="Permalink to this definition"></a></dt>
<dd><p>Sets the Spark Session to use for saving/loading.</p>
</dd></dl>
<!-- Full per-attribute documentation -->
<p class="rubric">Attributes Documentation</p>
<dl class="py attribute">
<dt id="pyspark.ml.util.DefaultParamsWriter.sc">
<code class="sig-name descname">sc</code><a class="headerlink" href="#pyspark.ml.util.DefaultParamsWriter.sc" title="Permalink to this definition"></a></dt>
<dd><p>Returns the underlying <cite>SparkContext</cite>.</p>
</dd></dl>
<dl class="py attribute">
<dt id="pyspark.ml.util.DefaultParamsWriter.sparkSession">
<code class="sig-name descname">sparkSession</code><a class="headerlink" href="#pyspark.ml.util.DefaultParamsWriter.sparkSession" title="Permalink to this definition"></a></dt>
<dd><p>Returns the user-specified Spark Session or the default.</p>
</dd></dl>
</dd></dl>
</div>
</div>
<!-- Previous/next page links; double-quoted attributes for consistency with the rest of the document -->
<div class="prev-next-bottom">
<a class="left-prev" id="prev-link" href="pyspark.ml.util.DefaultParamsWritable.html" title="previous page">DefaultParamsWritable</a>
<a class="right-next" id="next-link" href="pyspark.ml.util.GeneralMLWriter.html" title="next page">GeneralMLWriter</a>
</div>
</main>
</div>
</div>
<!-- Theme JS bundle loaded at end of body (preloaded in the head) -->
<script src="../../_static/js/index.3da636dd464baa7582d2.js"></script>
<!-- Page footer: copyright (holder empty in the Sphinx config) and generator credit -->
<footer class="footer mt-5 mt-md-0">
<div class="container">
<p>
&copy; Copyright .<br>
Created using <a href="https://sphinx-doc.org/">Sphinx</a> 3.0.4.<br>
</p>
</div>
</footer>
</body>
</html>