<!-- blob: 47158497b49a309bde7f9fa84c7377aa041a2811 [file] [log] [blame] -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>pyspark.ml.stat &#8212; PySpark 4.0.0-preview1 documentation</title>
<script data-cfasync="false">
// Restore the user's saved color mode/theme before first paint to avoid a flash
// of the wrong theme. Falls back to "" (auto mode) and "light" respectively.
const docEl = document.documentElement;
docEl.dataset.mode = localStorage.getItem("mode") || "";
docEl.dataset.theme = localStorage.getItem("theme") || "light";
</script>
<!-- Loaded before other Sphinx assets -->
<link href="../../../_static/styles/theme.css?digest=e353d410970836974a52" rel="stylesheet" />
<link href="../../../_static/styles/bootstrap.css?digest=e353d410970836974a52" rel="stylesheet" />
<link href="../../../_static/styles/pydata-sphinx-theme.css?digest=e353d410970836974a52" rel="stylesheet" />
<link href="../../../_static/vendor/fontawesome/6.1.2/css/all.min.css?digest=e353d410970836974a52" rel="stylesheet" />
<link rel="preload" as="font" type="font/woff2" crossorigin href="../../../_static/vendor/fontawesome/6.1.2/webfonts/fa-solid-900.woff2" />
<link rel="preload" as="font" type="font/woff2" crossorigin href="../../../_static/vendor/fontawesome/6.1.2/webfonts/fa-brands-400.woff2" />
<link rel="preload" as="font" type="font/woff2" crossorigin href="../../../_static/vendor/fontawesome/6.1.2/webfonts/fa-regular-400.woff2" />
<link rel="stylesheet" type="text/css" href="../../../_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="../../../_static/copybutton.css" />
<link rel="stylesheet" type="text/css" href="../../../_static/css/pyspark.css" />
<!-- Pre-loaded scripts that we'll load fully later -->
<link rel="preload" as="script" href="../../../_static/scripts/bootstrap.js?digest=e353d410970836974a52" />
<link rel="preload" as="script" href="../../../_static/scripts/pydata-sphinx-theme.js?digest=e353d410970836974a52" />
<script data-url_root="../../../" id="documentation_options" src="../../../_static/documentation_options.js"></script>
<script src="../../../_static/jquery.js"></script>
<script src="../../../_static/underscore.js"></script>
<script src="../../../_static/doctools.js"></script>
<script src="../../../_static/clipboard.min.js"></script>
<script src="../../../_static/copybutton.js"></script>
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<script>DOCUMENTATION_OPTIONS.pagename = '_modules/pyspark/ml/stat';</script>
<link rel="canonical" href="https://spark.apache.org/docs/latest/api/python/_modules/pyspark/ml/stat.html" />
<link rel="search" title="Search" href="../../../search.html" />
<meta name="docsearch:language" content="en">
<!-- Matomo -->
<script type="text/javascript">
// Matomo (open-source web analytics) bootstrap snippet for analytics.apache.org.
// Commands pushed onto _paq are replayed by matomo.js once it loads.
var _paq = window._paq = window._paq || [];
/* tracker methods like "setCustomDimension" should be called before "trackPageView" */
_paq.push(["disableCookies"]); // cookieless tracking: no tracking cookies are set
_paq.push(['trackPageView']);
_paq.push(['enableLinkTracking']);
(function() {
var u="https://analytics.apache.org/";
_paq.push(['setTrackerUrl', u+'matomo.php']);
_paq.push(['setSiteId', '40']); // site id 40 = this documentation site
// Inject matomo.js asynchronously, inserted before the first <script> on the page.
var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0];
g.async=true; g.src=u+'matomo.js'; s.parentNode.insertBefore(g,s);
})();
</script>
<!-- End Matomo Code -->
</head>
<body data-bs-spy="scroll" data-bs-target=".bd-toc-nav" data-offset="180" data-bs-root-margin="0px 0px -60%" data-default-mode="">
<a class="skip-link" href="#main-content">Skip to main content</a>
<input type="checkbox"
class="sidebar-toggle"
name="__primary"
id="__primary"/>
<label class="overlay overlay-primary" for="__primary"></label>
<input type="checkbox"
class="sidebar-toggle"
name="__secondary"
id="__secondary"/>
<label class="overlay overlay-secondary" for="__secondary"></label>
<div class="search-button__wrapper">
<div class="search-button__overlay"></div>
<div class="search-button__search-container">
<form class="bd-search d-flex align-items-center"
action="../../../search.html"
method="get">
<i class="fa-solid fa-magnifying-glass"></i>
<input type="search"
class="form-control"
name="q"
id="search-input"
placeholder="Search the docs ..."
aria-label="Search the docs ..."
autocomplete="off"
autocorrect="off"
autocapitalize="off"
spellcheck="false"/>
<span class="search-button__kbd-shortcut"><kbd class="kbd-shortcut__modifier">Ctrl</kbd>+<kbd>K</kbd></span>
</form></div>
</div>
<nav class="bd-header navbar navbar-expand-lg bd-navbar">
<div class="bd-header__inner bd-page-width">
<label class="sidebar-toggle primary-toggle" for="__primary">
<span class="fa-solid fa-bars"></span>
</label>
<div class="navbar-header-items__start">
<div class="navbar-item">
<a class="navbar-brand logo" href="../../../index.html">
<img src="../../../_static/spark-logo-light.png" class="logo__image only-light" alt="Logo image"/>
<script>document.write(`<img src="../../../_static/spark-logo-dark.png" class="logo__image only-dark" alt="Logo image"/>`);</script>
</a></div>
</div>
<div class="col-lg-9 navbar-header-items">
<div class="me-auto navbar-header-items__center">
<div class="navbar-item"><nav class="navbar-nav">
<p class="sidebar-header-items__title"
role="heading"
aria-level="1"
aria-label="Site Navigation">
Site Navigation
</p>
<ul class="bd-navbar-elements navbar-nav">
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../index.html">
Overview
</a>
</li>
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../getting_started/index.html">
Getting Started
</a>
</li>
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../user_guide/index.html">
User Guides
</a>
</li>
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../reference/index.html">
API Reference
</a>
</li>
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../development/index.html">
Development
</a>
</li>
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../migration_guide/index.html">
Migration Guides
</a>
</li>
</ul>
</nav></div>
</div>
<div class="navbar-header-items__end">
<div class="navbar-item navbar-persistent--container">
<script>
document.write(`
<button class="btn btn-sm navbar-btn search-button search-button__button" title="Search" aria-label="Search" data-bs-placement="bottom" data-bs-toggle="tooltip">
<i class="fa-solid fa-magnifying-glass"></i>
</button>
`);
</script>
</div>
<div class="navbar-item"><!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<div id="version-button" class="dropdown">
<button type="button" class="btn btn-secondary btn-sm navbar-btn dropdown-toggle" id="version_switcher_button" data-toggle="dropdown">
4.0.0-preview1
<span class="caret"></span>
</button>
<div id="version_switcher" class="dropdown-menu list-group-flush py-0" aria-labelledby="version_switcher_button">
<!-- dropdown will be populated by javascript on page load -->
</div>
</div>
<script type="text/javascript">
// Docs version switcher: fetches the list of published doc versions and fills
// the #version_switcher dropdown with one link per version.
// Function to construct the target URL from the JSON components
function buildURL(entry) {
var template = "https://spark.apache.org/docs/{version}/api/python/index.html"; // supplied by jinja
template = template.replace("{version}", entry.version);
return template;
}
// Function to check if corresponding page path exists in other version of docs
// and, if so, go there instead of the homepage of the other docs version
function checkPageExistsAndRedirect(event) {
const currentFilePath = "_modules/pyspark/ml/stat.html",
otherDocsHomepage = event.target.getAttribute("href");
let tryUrl = `${otherDocsHomepage}${currentFilePath}`;
$.ajax({
// HEAD request: existence check only, no response body is downloaded
type: 'HEAD',
url: tryUrl,
// if the page exists, go there
success: function() {
location.href = tryUrl;
}
}).fail(function() {
// page does not exist in that version: fall back to its docs homepage
location.href = otherDocsHomepage;
});
// Always cancel the default <a> navigation; the AJAX callbacks pick the target.
return false;
}
// Function to populate the version switcher
(function () {
// get JSON config
$.getJSON("https://spark.apache.org/static/versions.json", function(data, textStatus, jqXHR) {
// create the nodes first (before AJAX calls) to ensure the order is
// correct (for now, links will go to doc version homepage)
$.each(data, function(index, entry) {
// if no custom name specified (e.g., "latest"), use version string
if (!("name" in entry)) {
entry.name = entry.version;
}
// construct the appropriate URL, and add it to the dropdown
entry.url = buildURL(entry);
const node = document.createElement("a");
node.setAttribute("class", "list-group-item list-group-item-action py-1");
node.setAttribute("href", `${entry.url}`);
node.textContent = `${entry.name}`;
node.onclick = checkPageExistsAndRedirect;
$("#version_switcher").append(node);
});
});
})();
</script></div>
<div class="navbar-item">
<script>
document.write(`
<button class="theme-switch-button btn btn-sm btn-outline-primary navbar-btn rounded-circle" title="light/dark" aria-label="light/dark" data-bs-placement="bottom" data-bs-toggle="tooltip">
<span class="theme-switch" data-mode="light"><i class="fa-solid fa-sun"></i></span>
<span class="theme-switch" data-mode="dark"><i class="fa-solid fa-moon"></i></span>
<span class="theme-switch" data-mode="auto"><i class="fa-solid fa-circle-half-stroke"></i></span>
</button>
`);
</script></div>
<div class="navbar-item"><ul class="navbar-icon-links navbar-nav"
aria-label="Icon Links">
<li class="nav-item">
<a href="https://github.com/apache/spark" title="GitHub" class="nav-link" rel="noopener" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><span><i class="fa-brands fa-github"></i></span>
<span class="sr-only">GitHub</span></a>
</li>
<li class="nav-item">
<a href="https://pypi.org/project/pyspark" title="PyPI" class="nav-link" rel="noopener" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><span><i class="fa-solid fa-box"></i></span>
<span class="sr-only">PyPI</span></a>
</li>
</ul></div>
</div>
</div>
<div class="navbar-persistent--mobile">
<script>
document.write(`
<button class="btn btn-sm navbar-btn search-button search-button__button" title="Search" aria-label="Search" data-bs-placement="bottom" data-bs-toggle="tooltip">
<i class="fa-solid fa-magnifying-glass"></i>
</button>
`);
</script>
</div>
</div>
</nav>
<div class="bd-container">
<div class="bd-container__inner bd-page-width">
<div class="bd-sidebar-primary bd-sidebar hide-on-wide">
<div class="sidebar-header-items sidebar-primary__section">
<div class="sidebar-header-items__center">
<div class="navbar-item"><nav class="navbar-nav">
<p class="sidebar-header-items__title"
role="heading"
aria-level="1"
aria-label="Site Navigation">
Site Navigation
</p>
<ul class="bd-navbar-elements navbar-nav">
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../index.html">
Overview
</a>
</li>
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../getting_started/index.html">
Getting Started
</a>
</li>
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../user_guide/index.html">
User Guides
</a>
</li>
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../reference/index.html">
API Reference
</a>
</li>
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../development/index.html">
Development
</a>
</li>
<li class="nav-item">
<a class="nav-link nav-internal" href="../../../migration_guide/index.html">
Migration Guides
</a>
</li>
</ul>
</nav></div>
</div>
<div class="sidebar-header-items__end">
<div class="navbar-item"><!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<div id="version-button" class="dropdown">
<button type="button" class="btn btn-secondary btn-sm navbar-btn dropdown-toggle" id="version_switcher_button" data-toggle="dropdown">
4.0.0-preview1
<span class="caret"></span>
</button>
<div id="version_switcher" class="dropdown-menu list-group-flush py-0" aria-labelledby="version_switcher_button">
<!-- dropdown will be populated by javascript on page load -->
</div>
</div>
<script type="text/javascript">
// Docs version switcher (mobile/sidebar copy).
// NOTE(review): this script is a byte-for-byte duplicate of the one in the
// desktop header above. Running both redeclares buildURL and
// checkPageExistsAndRedirect, and the page contains duplicate ids
// (#version-button, #version_switcher_button, #version_switcher), so
// $("#version_switcher") below resolves only to the FIRST instance in the
// document — theme-generated; TODO confirm the sidebar dropdown is populated.
// Function to construct the target URL from the JSON components
function buildURL(entry) {
var template = "https://spark.apache.org/docs/{version}/api/python/index.html"; // supplied by jinja
template = template.replace("{version}", entry.version);
return template;
}
// Function to check if corresponding page path exists in other version of docs
// and, if so, go there instead of the homepage of the other docs version
function checkPageExistsAndRedirect(event) {
const currentFilePath = "_modules/pyspark/ml/stat.html",
otherDocsHomepage = event.target.getAttribute("href");
let tryUrl = `${otherDocsHomepage}${currentFilePath}`;
$.ajax({
// HEAD request: existence check only, no response body is downloaded
type: 'HEAD',
url: tryUrl,
// if the page exists, go there
success: function() {
location.href = tryUrl;
}
}).fail(function() {
// page does not exist in that version: fall back to its docs homepage
location.href = otherDocsHomepage;
});
// Always cancel the default <a> navigation; the AJAX callbacks pick the target.
return false;
}
// Function to populate the version switcher
(function () {
// get JSON config
$.getJSON("https://spark.apache.org/static/versions.json", function(data, textStatus, jqXHR) {
// create the nodes first (before AJAX calls) to ensure the order is
// correct (for now, links will go to doc version homepage)
$.each(data, function(index, entry) {
// if no custom name specified (e.g., "latest"), use version string
if (!("name" in entry)) {
entry.name = entry.version;
}
// construct the appropriate URL, and add it to the dropdown
entry.url = buildURL(entry);
const node = document.createElement("a");
node.setAttribute("class", "list-group-item list-group-item-action py-1");
node.setAttribute("href", `${entry.url}`);
node.textContent = `${entry.name}`;
node.onclick = checkPageExistsAndRedirect;
$("#version_switcher").append(node);
});
});
})();
</script></div>
<div class="navbar-item">
<script>
document.write(`
<button class="theme-switch-button btn btn-sm btn-outline-primary navbar-btn rounded-circle" title="light/dark" aria-label="light/dark" data-bs-placement="bottom" data-bs-toggle="tooltip">
<span class="theme-switch" data-mode="light"><i class="fa-solid fa-sun"></i></span>
<span class="theme-switch" data-mode="dark"><i class="fa-solid fa-moon"></i></span>
<span class="theme-switch" data-mode="auto"><i class="fa-solid fa-circle-half-stroke"></i></span>
</button>
`);
</script></div>
<div class="navbar-item"><ul class="navbar-icon-links navbar-nav"
aria-label="Icon Links">
<li class="nav-item">
<a href="https://github.com/apache/spark" title="GitHub" class="nav-link" rel="noopener" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><span><i class="fa-brands fa-github"></i></span>
<span class="sr-only">GitHub</span></a>
</li>
<li class="nav-item">
<a href="https://pypi.org/project/pyspark" title="PyPI" class="nav-link" rel="noopener" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><span><i class="fa-solid fa-box"></i></span>
<span class="sr-only">PyPI</span></a>
</li>
</ul></div>
</div>
</div>
<div class="sidebar-primary-items__end sidebar-primary__section">
</div>
<div id="rtd-footer-container"></div>
</div>
<main id="main-content" class="bd-main">
<div class="bd-content">
<div class="bd-article-container">
<div class="bd-header-article">
<div class="header-article-items header-article__inner">
<div class="header-article-items__start">
<div class="header-article-item">
<nav aria-label="Breadcrumbs">
<ul class="bd-breadcrumbs" role="navigation" aria-label="Breadcrumb">
<li class="breadcrumb-item breadcrumb-home">
<a href="../../../index.html" class="nav-link" aria-label="Home">
<i class="fa-solid fa-home"></i>
</a>
</li>
<li class="breadcrumb-item"><a href="../../index.html" class="nav-link">Module code</a></li>
<li class="breadcrumb-item active" aria-current="page">pyspark.ml.stat</li>
</ul>
</nav>
</div>
</div>
</div>
</div>
<div id="searchbox"></div>
<article class="bd-article" role="main">
<h1>Source code for pyspark.ml.stat</h1><div class="highlight"><pre>
<span></span><span class="c1">#</span>
<span class="c1"># Licensed to the Apache Software Foundation (ASF) under one or more</span>
<span class="c1"># contributor license agreements. See the NOTICE file distributed with</span>
<span class="c1"># this work for additional information regarding copyright ownership.</span>
<span class="c1"># The ASF licenses this file to You under the Apache License, Version 2.0</span>
<span class="c1"># (the &quot;License&quot;); you may not use this file except in compliance with</span>
<span class="c1"># the License. You may obtain a copy of the License at</span>
<span class="c1">#</span>
<span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="c1">#</span>
<span class="c1"># Unless required by applicable law or agreed to in writing, software</span>
<span class="c1"># distributed under the License is distributed on an &quot;AS IS&quot; BASIS,</span>
<span class="c1"># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span>
<span class="c1"># See the License for the specific language governing permissions and</span>
<span class="c1"># limitations under the License.</span>
<span class="c1">#</span>
<span class="kn">import</span> <span class="nn">sys</span>
<span class="kn">from</span> <span class="nn">typing</span> <span class="kn">import</span> <span class="n">Optional</span><span class="p">,</span> <span class="n">Tuple</span><span class="p">,</span> <span class="n">TYPE_CHECKING</span>
<span class="kn">from</span> <span class="nn">pyspark</span> <span class="kn">import</span> <span class="n">since</span>
<span class="kn">from</span> <span class="nn">pyspark.ml.common</span> <span class="kn">import</span> <span class="n">_java2py</span><span class="p">,</span> <span class="n">_py2java</span>
<span class="kn">from</span> <span class="nn">pyspark.ml.linalg</span> <span class="kn">import</span> <span class="n">Matrix</span><span class="p">,</span> <span class="n">Vector</span>
<span class="kn">from</span> <span class="nn">pyspark.ml.wrapper</span> <span class="kn">import</span> <span class="n">JavaWrapper</span><span class="p">,</span> <span class="n">_jvm</span>
<span class="kn">from</span> <span class="nn">pyspark.sql.column</span> <span class="kn">import</span> <span class="n">Column</span>
<span class="kn">from</span> <span class="nn">pyspark.sql.dataframe</span> <span class="kn">import</span> <span class="n">DataFrame</span>
<span class="kn">from</span> <span class="nn">pyspark.sql.functions</span> <span class="kn">import</span> <span class="n">lit</span>
<span class="k">if</span> <span class="n">TYPE_CHECKING</span><span class="p">:</span>
<span class="kn">from</span> <span class="nn">py4j.java_gateway</span> <span class="kn">import</span> <span class="n">JavaObject</span>
<div class="viewcode-block" id="ChiSquareTest"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.ChiSquareTest.html#pyspark.ml.stat.ChiSquareTest">[docs]</a><span class="k">class</span> <span class="nc">ChiSquareTest</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Conduct Pearson&#39;s independence test for every feature against the label. For each feature,</span>
<span class="sd"> the (feature, label) pairs are converted into a contingency matrix for which the Chi-squared</span>
<span class="sd"> statistic is computed. All label and feature values must be categorical.</span>
<span class="sd"> The null hypothesis is that the occurrence of the outcomes is statistically independent.</span>
<span class="sd"> .. versionadded:: 2.2.0</span>
<span class="sd"> &quot;&quot;&quot;</span>
<div class="viewcode-block" id="ChiSquareTest.test"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.ChiSquareTest.html#pyspark.ml.stat.ChiSquareTest.test">[docs]</a> <span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">test</span><span class="p">(</span>
<span class="n">dataset</span><span class="p">:</span> <span class="n">DataFrame</span><span class="p">,</span> <span class="n">featuresCol</span><span class="p">:</span> <span class="nb">str</span><span class="p">,</span> <span class="n">labelCol</span><span class="p">:</span> <span class="nb">str</span><span class="p">,</span> <span class="n">flatten</span><span class="p">:</span> <span class="nb">bool</span> <span class="o">=</span> <span class="kc">False</span>
<span class="p">)</span> <span class="o">-&gt;</span> <span class="n">DataFrame</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Perform a Pearson&#39;s independence test using dataset.</span>
<span class="sd"> .. versionadded:: 2.2.0</span>
<span class="sd"> .. versionchanged:: 3.1.0</span>
<span class="sd"> Added optional ``flatten`` argument.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> dataset : :py:class:`pyspark.sql.DataFrame`</span>
<span class="sd"> DataFrame of categorical labels and categorical features.</span>
<span class="sd"> Real-valued features will be treated as categorical for each distinct value.</span>
<span class="sd"> featuresCol : str</span>
<span class="sd"> Name of features column in dataset, of type `Vector` (`VectorUDT`).</span>
<span class="sd"> labelCol : str</span>
<span class="sd"> Name of label column in dataset, of any numerical type.</span>
<span class="sd"> flatten : bool, optional</span>
<span class="sd"> if True, flattens the returned dataframe.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> :py:class:`pyspark.sql.DataFrame`</span>
<span class="sd"> DataFrame containing the test result for every feature against the label.</span>
<span class="sd"> If flatten is True, this DataFrame will contain one row per feature with the following</span>
<span class="sd"> fields:</span>
<span class="sd"> - `featureIndex: int`</span>
<span class="sd"> - `pValue: float`</span>
<span class="sd"> - `degreesOfFreedom: int`</span>
<span class="sd"> - `statistic: float`</span>
<span class="sd"> If flatten is False, this DataFrame will contain a single Row with the following fields:</span>
<span class="sd"> - `pValues: Vector`</span>
<span class="sd"> - `degreesOfFreedom: Array[int]`</span>
<span class="sd"> - `statistics: Vector`</span>
<span class="sd"> Each of these fields has one value per feature.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; from pyspark.ml.linalg import Vectors</span>
<span class="sd"> &gt;&gt;&gt; from pyspark.ml.stat import ChiSquareTest</span>
<span class="sd"> &gt;&gt;&gt; dataset = [[0, Vectors.dense([0, 0, 1])],</span>
<span class="sd"> ... [0, Vectors.dense([1, 0, 1])],</span>
<span class="sd"> ... [1, Vectors.dense([2, 1, 1])],</span>
<span class="sd"> ... [1, Vectors.dense([3, 1, 1])]]</span>
<span class="sd"> &gt;&gt;&gt; dataset = spark.createDataFrame(dataset, [&quot;label&quot;, &quot;features&quot;])</span>
<span class="sd"> &gt;&gt;&gt; chiSqResult = ChiSquareTest.test(dataset, &#39;features&#39;, &#39;label&#39;)</span>
<span class="sd"> &gt;&gt;&gt; chiSqResult.select(&quot;degreesOfFreedom&quot;).collect()[0]</span>
<span class="sd"> Row(degreesOfFreedom=[3, 1, 0])</span>
<span class="sd"> &gt;&gt;&gt; chiSqResult = ChiSquareTest.test(dataset, &#39;features&#39;, &#39;label&#39;, True)</span>
<span class="sd"> &gt;&gt;&gt; row = chiSqResult.orderBy(&quot;featureIndex&quot;).collect()</span>
<span class="sd"> &gt;&gt;&gt; row[0].statistic</span>
<span class="sd"> 4.0</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="kn">from</span> <span class="nn">pyspark.core.context</span> <span class="kn">import</span> <span class="n">SparkContext</span>
<span class="n">sc</span> <span class="o">=</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span>
<span class="k">assert</span> <span class="n">sc</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span>
<span class="n">javaTestObj</span> <span class="o">=</span> <span class="n">_jvm</span><span class="p">()</span><span class="o">.</span><span class="n">org</span><span class="o">.</span><span class="n">apache</span><span class="o">.</span><span class="n">spark</span><span class="o">.</span><span class="n">ml</span><span class="o">.</span><span class="n">stat</span><span class="o">.</span><span class="n">ChiSquareTest</span>
<span class="n">args</span> <span class="o">=</span> <span class="p">[</span><span class="n">_py2java</span><span class="p">(</span><span class="n">sc</span><span class="p">,</span> <span class="n">arg</span><span class="p">)</span> <span class="k">for</span> <span class="n">arg</span> <span class="ow">in</span> <span class="p">(</span><span class="n">dataset</span><span class="p">,</span> <span class="n">featuresCol</span><span class="p">,</span> <span class="n">labelCol</span><span class="p">,</span> <span class="n">flatten</span><span class="p">)]</span>
<span class="k">return</span> <span class="n">_java2py</span><span class="p">(</span><span class="n">sc</span><span class="p">,</span> <span class="n">javaTestObj</span><span class="o">.</span><span class="n">test</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">))</span></div></div>
<div class="viewcode-block" id="Correlation"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Correlation.html#pyspark.ml.stat.Correlation">[docs]</a><span class="k">class</span> <span class="nc">Correlation</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Compute the correlation matrix for the input dataset of Vectors using the specified method.</span>
<span class="sd"> Methods currently supported: `pearson` (default), `spearman`.</span>
<span class="sd"> .. versionadded:: 2.2.0</span>
<span class="sd"> Notes</span>
<span class="sd"> -----</span>
<span class="sd"> For Spearman, a rank correlation, we need to create an RDD[Double] for each column</span>
<span class="sd"> and sort it in order to retrieve the ranks and then join the columns back into an RDD[Vector],</span>
<span class="sd"> which is fairly costly. Cache the input Dataset before calling corr with `method = &#39;spearman&#39;`</span>
<span class="sd"> to avoid recomputing the common lineage.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<div class="viewcode-block" id="Correlation.corr"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Correlation.html#pyspark.ml.stat.Correlation.corr">[docs]</a> <span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">corr</span><span class="p">(</span><span class="n">dataset</span><span class="p">:</span> <span class="n">DataFrame</span><span class="p">,</span> <span class="n">column</span><span class="p">:</span> <span class="nb">str</span><span class="p">,</span> <span class="n">method</span><span class="p">:</span> <span class="nb">str</span> <span class="o">=</span> <span class="s2">&quot;pearson&quot;</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">DataFrame</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Compute the correlation matrix with specified method using dataset.</span>
<span class="sd"> .. versionadded:: 2.2.0</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> dataset : :py:class:`pyspark.sql.DataFrame`</span>
<span class="sd"> A DataFrame.</span>
<span class="sd"> column : str</span>
<span class="sd"> The name of the column of vectors for which the correlation coefficient needs</span>
<span class="sd"> to be computed. This must be a column of the dataset, and it must contain</span>
<span class="sd"> Vector objects.</span>
<span class="sd"> method : str, optional</span>
<span class="sd"> String specifying the method to use for computing correlation.</span>
<span class="sd"> Supported: `pearson` (default), `spearman`.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> A DataFrame that contains the correlation matrix of the column of vectors. This</span>
<span class="sd"> DataFrame contains a single row and a single column of name `METHODNAME(COLUMN)`.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; from pyspark.ml.linalg import DenseMatrix, Vectors</span>
<span class="sd"> &gt;&gt;&gt; from pyspark.ml.stat import Correlation</span>
<span class="sd"> &gt;&gt;&gt; dataset = [[Vectors.dense([1, 0, 0, -2])],</span>
<span class="sd"> ... [Vectors.dense([4, 5, 0, 3])],</span>
<span class="sd"> ... [Vectors.dense([6, 7, 0, 8])],</span>
<span class="sd"> ... [Vectors.dense([9, 0, 0, 1])]]</span>
<span class="sd"> &gt;&gt;&gt; dataset = spark.createDataFrame(dataset, [&#39;features&#39;])</span>
<span class="sd"> &gt;&gt;&gt; pearsonCorr = Correlation.corr(dataset, &#39;features&#39;, &#39;pearson&#39;).collect()[0][0]</span>
<span class="sd"> &gt;&gt;&gt; print(str(pearsonCorr).replace(&#39;nan&#39;, &#39;NaN&#39;))</span>
<span class="sd"> DenseMatrix([[ 1. , 0.0556..., NaN, 0.4004...],</span>
<span class="sd"> [ 0.0556..., 1. , NaN, 0.9135...],</span>
<span class="sd"> [ NaN, NaN, 1. , NaN],</span>
<span class="sd"> [ 0.4004..., 0.9135..., NaN, 1. ]])</span>
<span class="sd"> &gt;&gt;&gt; spearmanCorr = Correlation.corr(dataset, &#39;features&#39;, method=&#39;spearman&#39;).collect()[0][0]</span>
<span class="sd"> &gt;&gt;&gt; print(str(spearmanCorr).replace(&#39;nan&#39;, &#39;NaN&#39;))</span>
<span class="sd"> DenseMatrix([[ 1. , 0.1054..., NaN, 0.4 ],</span>
<span class="sd"> [ 0.1054..., 1. , NaN, 0.9486... ],</span>
<span class="sd"> [ NaN, NaN, 1. , NaN],</span>
<span class="sd"> [ 0.4 , 0.9486... , NaN, 1. ]])</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="kn">from</span> <span class="nn">pyspark.core.context</span> <span class="kn">import</span> <span class="n">SparkContext</span>
<span class="n">sc</span> <span class="o">=</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span>
<span class="k">assert</span> <span class="n">sc</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span>
<span class="n">javaCorrObj</span> <span class="o">=</span> <span class="n">_jvm</span><span class="p">()</span><span class="o">.</span><span class="n">org</span><span class="o">.</span><span class="n">apache</span><span class="o">.</span><span class="n">spark</span><span class="o">.</span><span class="n">ml</span><span class="o">.</span><span class="n">stat</span><span class="o">.</span><span class="n">Correlation</span>
<span class="n">args</span> <span class="o">=</span> <span class="p">[</span><span class="n">_py2java</span><span class="p">(</span><span class="n">sc</span><span class="p">,</span> <span class="n">arg</span><span class="p">)</span> <span class="k">for</span> <span class="n">arg</span> <span class="ow">in</span> <span class="p">(</span><span class="n">dataset</span><span class="p">,</span> <span class="n">column</span><span class="p">,</span> <span class="n">method</span><span class="p">)]</span>
<span class="k">return</span> <span class="n">_java2py</span><span class="p">(</span><span class="n">sc</span><span class="p">,</span> <span class="n">javaCorrObj</span><span class="o">.</span><span class="n">corr</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">))</span></div></div>
<div class="viewcode-block" id="KolmogorovSmirnovTest"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.KolmogorovSmirnovTest.html#pyspark.ml.stat.KolmogorovSmirnovTest">[docs]</a><span class="k">class</span> <span class="nc">KolmogorovSmirnovTest</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Conduct the two-sided Kolmogorov Smirnov (KS) test for data sampled from a continuous</span>
<span class="sd"> distribution.</span>
<span class="sd"> By comparing the largest difference between the empirical cumulative</span>
<span class="sd"> distribution of the sample data and the theoretical distribution we can provide a test for the</span>
<span class="sd"> the null hypothesis that the sample data comes from that theoretical distribution.</span>
<span class="sd"> .. versionadded:: 2.4.0</span>
<span class="sd"> &quot;&quot;&quot;</span>
<div class="viewcode-block" id="KolmogorovSmirnovTest.test"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.KolmogorovSmirnovTest.html#pyspark.ml.stat.KolmogorovSmirnovTest.test">[docs]</a> <span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">dataset</span><span class="p">:</span> <span class="n">DataFrame</span><span class="p">,</span> <span class="n">sampleCol</span><span class="p">:</span> <span class="nb">str</span><span class="p">,</span> <span class="n">distName</span><span class="p">:</span> <span class="nb">str</span><span class="p">,</span> <span class="o">*</span><span class="n">params</span><span class="p">:</span> <span class="nb">float</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">DataFrame</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Conduct a one-sample, two-sided Kolmogorov-Smirnov test for probability distribution</span>
<span class="sd"> equality. Currently supports the normal distribution, taking as parameters the mean and</span>
<span class="sd"> standard deviation.</span>
<span class="sd"> .. versionadded:: 2.4.0</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> dataset : :py:class:`pyspark.sql.DataFrame`</span>
<span class="sd"> a Dataset or a DataFrame containing the sample of data to test.</span>
<span class="sd"> sampleCol : str</span>
<span class="sd"> Name of sample column in dataset, of any numerical type.</span>
<span class="sd"> distName : str</span>
<span class="sd"> a `string` name for a theoretical distribution, currently only support &quot;norm&quot;.</span>
<span class="sd"> params : float</span>
<span class="sd"> a list of `float` values specifying the parameters to be used for the theoretical</span>
<span class="sd"> distribution. For &quot;norm&quot; distribution, the parameters includes mean and variance.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> A DataFrame that contains the Kolmogorov-Smirnov test result for the input sampled data.</span>
<span class="sd"> This DataFrame will contain a single Row with the following fields:</span>
<span class="sd"> - `pValue: Double`</span>
<span class="sd"> - `statistic: Double`</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; from pyspark.ml.stat import KolmogorovSmirnovTest</span>
<span class="sd"> &gt;&gt;&gt; dataset = [[-1.0], [0.0], [1.0]]</span>
<span class="sd"> &gt;&gt;&gt; dataset = spark.createDataFrame(dataset, [&#39;sample&#39;])</span>
<span class="sd"> &gt;&gt;&gt; ksResult = KolmogorovSmirnovTest.test(dataset, &#39;sample&#39;, &#39;norm&#39;, 0.0, 1.0).first()</span>
<span class="sd"> &gt;&gt;&gt; round(ksResult.pValue, 3)</span>
<span class="sd"> 1.0</span>
<span class="sd"> &gt;&gt;&gt; round(ksResult.statistic, 3)</span>
<span class="sd"> 0.175</span>
<span class="sd"> &gt;&gt;&gt; dataset = [[2.0], [3.0], [4.0]]</span>
<span class="sd"> &gt;&gt;&gt; dataset = spark.createDataFrame(dataset, [&#39;sample&#39;])</span>
<span class="sd"> &gt;&gt;&gt; ksResult = KolmogorovSmirnovTest.test(dataset, &#39;sample&#39;, &#39;norm&#39;, 3.0, 1.0).first()</span>
<span class="sd"> &gt;&gt;&gt; round(ksResult.pValue, 3)</span>
<span class="sd"> 1.0</span>
<span class="sd"> &gt;&gt;&gt; round(ksResult.statistic, 3)</span>
<span class="sd"> 0.175</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="kn">from</span> <span class="nn">pyspark.core.context</span> <span class="kn">import</span> <span class="n">SparkContext</span>
<span class="n">sc</span> <span class="o">=</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span>
<span class="k">assert</span> <span class="n">sc</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span>
<span class="n">javaTestObj</span> <span class="o">=</span> <span class="n">_jvm</span><span class="p">()</span><span class="o">.</span><span class="n">org</span><span class="o">.</span><span class="n">apache</span><span class="o">.</span><span class="n">spark</span><span class="o">.</span><span class="n">ml</span><span class="o">.</span><span class="n">stat</span><span class="o">.</span><span class="n">KolmogorovSmirnovTest</span>
<span class="n">dataset</span> <span class="o">=</span> <span class="n">_py2java</span><span class="p">(</span><span class="n">sc</span><span class="p">,</span> <span class="n">dataset</span><span class="p">)</span>
<span class="n">params</span> <span class="o">=</span> <span class="p">[</span><span class="nb">float</span><span class="p">(</span><span class="n">param</span><span class="p">)</span> <span class="k">for</span> <span class="n">param</span> <span class="ow">in</span> <span class="n">params</span><span class="p">]</span> <span class="c1"># type: ignore[assignment]</span>
<span class="k">return</span> <span class="n">_java2py</span><span class="p">(</span>
<span class="n">sc</span><span class="p">,</span> <span class="n">javaTestObj</span><span class="o">.</span><span class="n">test</span><span class="p">(</span><span class="n">dataset</span><span class="p">,</span> <span class="n">sampleCol</span><span class="p">,</span> <span class="n">distName</span><span class="p">,</span> <span class="n">_jvm</span><span class="p">()</span><span class="o">.</span><span class="n">PythonUtils</span><span class="o">.</span><span class="n">toSeq</span><span class="p">(</span><span class="n">params</span><span class="p">))</span>
<span class="p">)</span></div></div>
<div class="viewcode-block" id="Summarizer"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer">[docs]</a><span class="k">class</span> <span class="nc">Summarizer</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Tools for vectorized statistics on MLlib Vectors.</span>
<span class="sd"> The methods in this package provide various statistics for Vectors contained inside DataFrames.</span>
<span class="sd"> This class lets users pick the statistics they would like to extract for a given column.</span>
<span class="sd"> .. versionadded:: 2.4.0</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; from pyspark.ml.stat import Summarizer</span>
<span class="sd"> &gt;&gt;&gt; from pyspark.sql import Row</span>
<span class="sd"> &gt;&gt;&gt; from pyspark.ml.linalg import Vectors</span>
<span class="sd"> &gt;&gt;&gt; summarizer = Summarizer.metrics(&quot;mean&quot;, &quot;count&quot;)</span>
<span class="sd"> &gt;&gt;&gt; df = sc.parallelize([Row(weight=1.0, features=Vectors.dense(1.0, 1.0, 1.0)),</span>
<span class="sd"> ... Row(weight=0.0, features=Vectors.dense(1.0, 2.0, 3.0))]).toDF()</span>
<span class="sd"> &gt;&gt;&gt; df.select(summarizer.summary(df.features, df.weight)).show(truncate=False)</span>
<span class="sd"> +-----------------------------------+</span>
<span class="sd"> |aggregate_metrics(features, weight)|</span>
<span class="sd"> +-----------------------------------+</span>
<span class="sd"> |{[1.0,1.0,1.0], 1} |</span>
<span class="sd"> +-----------------------------------+</span>
<span class="sd"> &gt;&gt;&gt; df.select(summarizer.summary(df.features)).show(truncate=False)</span>
<span class="sd"> +--------------------------------+</span>
<span class="sd"> |aggregate_metrics(features, 1.0)|</span>
<span class="sd"> +--------------------------------+</span>
<span class="sd"> |{[1.0,1.5,2.0], 2} |</span>
<span class="sd"> +--------------------------------+</span>
<span class="sd"> &gt;&gt;&gt; df.select(Summarizer.mean(df.features, df.weight)).show(truncate=False)</span>
<span class="sd"> +--------------+</span>
<span class="sd"> |mean(features)|</span>
<span class="sd"> +--------------+</span>
<span class="sd"> |[1.0,1.0,1.0] |</span>
<span class="sd"> +--------------+</span>
<span class="sd"> &gt;&gt;&gt; df.select(Summarizer.mean(df.features)).show(truncate=False)</span>
<span class="sd"> +--------------+</span>
<span class="sd"> |mean(features)|</span>
<span class="sd"> +--------------+</span>
<span class="sd"> |[1.0,1.5,2.0] |</span>
<span class="sd"> +--------------+</span>
<span class="sd"> &quot;&quot;&quot;</span>
<div class="viewcode-block" id="Summarizer.mean"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.mean">[docs]</a> <span class="nd">@staticmethod</span>
<span class="nd">@since</span><span class="p">(</span><span class="s2">&quot;2.4.0&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">mean</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> return a column of mean summary</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">,</span> <span class="s2">&quot;mean&quot;</span><span class="p">)</span></div>
<div class="viewcode-block" id="Summarizer.sum"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.sum">[docs]</a> <span class="nd">@staticmethod</span>
<span class="nd">@since</span><span class="p">(</span><span class="s2">&quot;3.0.0&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">sum</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> return a column of sum summary</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">,</span> <span class="s2">&quot;sum&quot;</span><span class="p">)</span></div>
<div class="viewcode-block" id="Summarizer.variance"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.variance">[docs]</a> <span class="nd">@staticmethod</span>
<span class="nd">@since</span><span class="p">(</span><span class="s2">&quot;2.4.0&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">variance</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> return a column of variance summary</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">,</span> <span class="s2">&quot;variance&quot;</span><span class="p">)</span></div>
<div class="viewcode-block" id="Summarizer.std"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.std">[docs]</a> <span class="nd">@staticmethod</span>
<span class="nd">@since</span><span class="p">(</span><span class="s2">&quot;3.0.0&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">std</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> return a column of std summary</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">,</span> <span class="s2">&quot;std&quot;</span><span class="p">)</span></div>
<div class="viewcode-block" id="Summarizer.count"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.count">[docs]</a> <span class="nd">@staticmethod</span>
<span class="nd">@since</span><span class="p">(</span><span class="s2">&quot;2.4.0&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">count</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> return a column of count summary</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">,</span> <span class="s2">&quot;count&quot;</span><span class="p">)</span></div>
<div class="viewcode-block" id="Summarizer.numNonZeros"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.numNonZeros">[docs]</a> <span class="nd">@staticmethod</span>
<span class="nd">@since</span><span class="p">(</span><span class="s2">&quot;2.4.0&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">numNonZeros</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> return a column of numNonZero summary</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">,</span> <span class="s2">&quot;numNonZeros&quot;</span><span class="p">)</span></div>
<div class="viewcode-block" id="Summarizer.max"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.max">[docs]</a> <span class="nd">@staticmethod</span>
<span class="nd">@since</span><span class="p">(</span><span class="s2">&quot;2.4.0&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">max</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> return a column of max summary</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">,</span> <span class="s2">&quot;max&quot;</span><span class="p">)</span></div>
<div class="viewcode-block" id="Summarizer.min"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.min">[docs]</a> <span class="nd">@staticmethod</span>
<span class="nd">@since</span><span class="p">(</span><span class="s2">&quot;2.4.0&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">min</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> return a column of min summary</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">,</span> <span class="s2">&quot;min&quot;</span><span class="p">)</span></div>
<div class="viewcode-block" id="Summarizer.normL1"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.normL1">[docs]</a> <span class="nd">@staticmethod</span>
<span class="nd">@since</span><span class="p">(</span><span class="s2">&quot;2.4.0&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">normL1</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> return a column of normL1 summary</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">,</span> <span class="s2">&quot;normL1&quot;</span><span class="p">)</span></div>
<div class="viewcode-block" id="Summarizer.normL2"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.normL2">[docs]</a> <span class="nd">@staticmethod</span>
<span class="nd">@since</span><span class="p">(</span><span class="s2">&quot;2.4.0&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">normL2</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> return a column of normL2 summary</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">,</span> <span class="s2">&quot;normL2&quot;</span><span class="p">)</span></div>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_check_param</span><span class="p">(</span><span class="n">featuresCol</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">])</span> <span class="o">-&gt;</span> <span class="n">Tuple</span><span class="p">[</span><span class="n">Column</span><span class="p">,</span> <span class="n">Column</span><span class="p">]:</span>
<span class="k">if</span> <span class="n">weightCol</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">weightCol</span> <span class="o">=</span> <span class="n">lit</span><span class="p">(</span><span class="mf">1.0</span><span class="p">)</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">featuresCol</span><span class="p">,</span> <span class="n">Column</span><span class="p">)</span> <span class="ow">or</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">weightCol</span><span class="p">,</span> <span class="n">Column</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="s2">&quot;featureCol and weightCol should be a Column&quot;</span><span class="p">)</span>
<span class="k">return</span> <span class="n">featuresCol</span><span class="p">,</span> <span class="n">weightCol</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_get_single_metric</span><span class="p">(</span><span class="n">col</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">],</span> <span class="n">metric</span><span class="p">:</span> <span class="nb">str</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span>
<span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span> <span class="o">=</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_check_param</span><span class="p">(</span><span class="n">col</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">)</span>
<span class="k">return</span> <span class="n">Column</span><span class="p">(</span>
<span class="n">JavaWrapper</span><span class="o">.</span><span class="n">_new_java_obj</span><span class="p">(</span>
<span class="s2">&quot;org.apache.spark.ml.stat.Summarizer.&quot;</span> <span class="o">+</span> <span class="n">metric</span><span class="p">,</span> <span class="n">col</span><span class="o">.</span><span class="n">_jc</span><span class="p">,</span> <span class="n">weightCol</span><span class="o">.</span><span class="n">_jc</span>
<span class="p">)</span>
<span class="p">)</span>
<div class="viewcode-block" id="Summarizer.metrics"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.Summarizer.html#pyspark.ml.stat.Summarizer.metrics">[docs]</a> <span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">metrics</span><span class="p">(</span><span class="o">*</span><span class="n">metrics</span><span class="p">:</span> <span class="nb">str</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="s2">&quot;SummaryBuilder&quot;</span><span class="p">:</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Given a list of metrics, provides a builder that it turns computes metrics from a column.</span>
<span class="sd"> See the documentation of :py:class:`Summarizer` for an example.</span>
<span class="sd"> The following metrics are accepted (case sensitive):</span>
<span class="sd"> - mean: a vector that contains the coefficient-wise mean.</span>
<span class="sd"> - sum: a vector that contains the coefficient-wise sum.</span>
<span class="sd"> - variance: a vector that contains the coefficient-wise variance.</span>
<span class="sd"> - std: a vector that contains the coefficient-wise standard deviation.</span>
<span class="sd"> - count: the count of all vectors seen.</span>
<span class="sd"> - numNonzeros: a vector with the number of non-zeros for each coefficients</span>
<span class="sd"> - max: the maximum for each coefficient.</span>
<span class="sd"> - min: the minimum for each coefficient.</span>
<span class="sd"> - normL2: the Euclidean norm for each coefficient.</span>
<span class="sd"> - normL1: the L1 norm of each coefficient (sum of the absolute values).</span>
<span class="sd"> .. versionadded:: 2.4.0</span>
<span class="sd"> Notes</span>
<span class="sd"> -----</span>
<span class="sd"> Currently, the performance of this interface is about 2x~3x slower than using the RDD</span>
<span class="sd"> interface.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> metrics : str</span>
<span class="sd"> metrics that can be provided.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> :py:class:`pyspark.ml.stat.SummaryBuilder`</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="kn">from</span> <span class="nn">pyspark.core.context</span> <span class="kn">import</span> <span class="n">SparkContext</span>
<span class="kn">from</span> <span class="nn">pyspark.sql.classic.column</span> <span class="kn">import</span> <span class="n">_to_seq</span>
<span class="n">sc</span> <span class="o">=</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span>
<span class="k">assert</span> <span class="n">sc</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span>
<span class="n">js</span> <span class="o">=</span> <span class="n">JavaWrapper</span><span class="o">.</span><span class="n">_new_java_obj</span><span class="p">(</span>
<span class="s2">&quot;org.apache.spark.ml.stat.Summarizer.metrics&quot;</span><span class="p">,</span> <span class="n">_to_seq</span><span class="p">(</span><span class="n">sc</span><span class="p">,</span> <span class="n">metrics</span><span class="p">)</span>
<span class="p">)</span>
<span class="k">return</span> <span class="n">SummaryBuilder</span><span class="p">(</span><span class="n">js</span><span class="p">)</span></div></div>
<div class="viewcode-block" id="SummaryBuilder"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.SummaryBuilder.html#pyspark.ml.stat.SummaryBuilder">[docs]</a><span class="k">class</span> <span class="nc">SummaryBuilder</span><span class="p">(</span><span class="n">JavaWrapper</span><span class="p">):</span><!-- viewcode: class SummaryBuilder(JavaWrapper); wraps a py4j JavaObject handed in by Summarizer -->
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> A builder object that provides summary statistics about a given column.</span>
<span class="sd"> Users should not directly create such builders, but instead use one of the methods in</span>
<span class="sd"> :py:class:`pyspark.ml.stat.Summarizer`</span>
<span class="sd"> .. versionadded:: 2.4.0</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">jSummaryBuilder</span><span class="p">:</span> <span class="s2">&quot;JavaObject&quot;</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">SummaryBuilder</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="n">jSummaryBuilder</span><span class="p">)</span>
<div class="viewcode-block" id="SummaryBuilder.summary"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.SummaryBuilder.html#pyspark.ml.stat.SummaryBuilder.summary">[docs]</a> <span class="k">def</span> <span class="nf">summary</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">featuresCol</span><span class="p">:</span> <span class="n">Column</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Column</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Column</span><span class="p">:</span><!-- viewcode: SummaryBuilder.summary(featuresCol, weightCol=None) delegates to the JVM object's summary() and returns a Column -->
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Returns an aggregate object that contains the summary of the column with the requested</span>
<span class="sd"> metrics.</span>
<span class="sd"> .. versionadded:: 2.4.0</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> featuresCol : str</span>
<span class="sd"> a column that contains features Vector object.</span>
<span class="sd"> weightCol : str, optional</span>
<span class="sd"> a column that contains weight value. Default weight is 1.0.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> :py:class:`pyspark.sql.Column`</span>
<span class="sd"> an aggregate column that contains the statistics. The exact content of this</span>
<span class="sd"> structure is determined during the creation of the builder.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">featuresCol</span><span class="p">,</span> <span class="n">weightCol</span> <span class="o">=</span> <span class="n">Summarizer</span><span class="o">.</span><span class="n">_check_param</span><span class="p">(</span><span class="n">featuresCol</span><span class="p">,</span> <span class="n">weightCol</span><span class="p">)</span>
<span class="k">assert</span> <span class="bp">self</span><span class="o">.</span><span class="n">_java_obj</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span>
<span class="k">return</span> <span class="n">Column</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_java_obj</span><span class="o">.</span><span class="n">summary</span><span class="p">(</span><span class="n">featuresCol</span><span class="o">.</span><span class="n">_jc</span><span class="p">,</span> <span class="n">weightCol</span><span class="o">.</span><span class="n">_jc</span><span class="p">))</span></div></div>
<div class="viewcode-block" id="MultivariateGaussian"><a class="viewcode-back" href="../../../reference/api/pyspark.ml.stat.MultivariateGaussian.html#pyspark.ml.stat.MultivariateGaussian">[docs]</a><span class="k">class</span> <span class="nc">MultivariateGaussian</span><span class="p">:</span><!-- viewcode: class MultivariateGaussian; plain container storing the (mean, cov) pair -->
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Represents a (mean, cov) tuple</span>
<span class="sd"> .. versionadded:: 3.0.0</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; from pyspark.ml.linalg import DenseMatrix, Vectors</span>
<span class="sd"> &gt;&gt;&gt; from pyspark.ml.stat import MultivariateGaussian</span>
<span class="sd"> &gt;&gt;&gt; m = MultivariateGaussian(Vectors.dense([11,12]), DenseMatrix(2, 2, (1.0, 3.0, 5.0, 2.0)))</span>
<span class="sd"> &gt;&gt;&gt; (m.mean, m.cov.toArray())</span>
<span class="sd"> (DenseVector([11.0, 12.0]), array([[ 1., 5.],</span>
<span class="sd"> [ 3., 2.]]))</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">mean</span><span class="p">:</span> <span class="n">Vector</span><span class="p">,</span> <span class="n">cov</span><span class="p">:</span> <span class="n">Matrix</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">mean</span> <span class="o">=</span> <span class="n">mean</span>
<span class="bp">self</span><span class="o">.</span><span class="n">cov</span> <span class="o">=</span> <span class="n">cov</span></div>
<span class="k">if</span> <span class="vm">__name__</span> <span class="o">==</span> <span class="s2">&quot;__main__&quot;</span><span class="p">:</span><!-- viewcode: doctest runner; builds a local[2] SparkSession, runs module doctests, exits non-zero on failure -->
<span class="kn">import</span> <span class="nn">doctest</span>
<span class="kn">import</span> <span class="nn">numpy</span>
<span class="kn">import</span> <span class="nn">pyspark.ml.stat</span>
<span class="kn">from</span> <span class="nn">pyspark.sql</span> <span class="kn">import</span> <span class="n">SparkSession</span>
<span class="k">try</span><span class="p">:</span>
<span class="c1"># Numpy 1.14+ changed it&#39;s string format.</span>
<span class="n">numpy</span><span class="o">.</span><span class="n">set_printoptions</span><span class="p">(</span><span class="n">legacy</span><span class="o">=</span><span class="s2">&quot;1.13&quot;</span><span class="p">)</span>
<span class="k">except</span> <span class="ne">TypeError</span><span class="p">:</span>
<span class="k">pass</span>
<span class="n">globs</span> <span class="o">=</span> <span class="n">pyspark</span><span class="o">.</span><span class="n">ml</span><span class="o">.</span><span class="n">stat</span><span class="o">.</span><span class="vm">__dict__</span><span class="o">.</span><span class="n">copy</span><span class="p">()</span>
<span class="c1"># The small batch size here ensures that we see multiple batches,</span>
<span class="c1"># even in these small test examples:</span>
<span class="n">spark</span> <span class="o">=</span> <span class="n">SparkSession</span><span class="o">.</span><span class="n">builder</span><span class="o">.</span><span class="n">master</span><span class="p">(</span><span class="s2">&quot;local[2]&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">appName</span><span class="p">(</span><span class="s2">&quot;ml.stat tests&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">getOrCreate</span><span class="p">()</span>
<span class="n">sc</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">sparkContext</span>
<span class="n">globs</span><span class="p">[</span><span class="s2">&quot;sc&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">sc</span>
<span class="n">globs</span><span class="p">[</span><span class="s2">&quot;spark&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">spark</span>
<span class="n">failure_count</span><span class="p">,</span> <span class="n">test_count</span> <span class="o">=</span> <span class="n">doctest</span><span class="o">.</span><span class="n">testmod</span><span class="p">(</span>
<span class="n">globs</span><span class="o">=</span><span class="n">globs</span><span class="p">,</span> <span class="n">optionflags</span><span class="o">=</span><span class="n">doctest</span><span class="o">.</span><span class="n">ELLIPSIS</span> <span class="o">|</span> <span class="n">doctest</span><span class="o">.</span><span class="n">NORMALIZE_WHITESPACE</span>
<span class="p">)</span>
<span class="n">spark</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
<span class="k">if</span> <span class="n">failure_count</span><span class="p">:</span>
<span class="n">sys</span><span class="o">.</span><span class="n">exit</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span>
</pre></div>
</article>
<footer class="bd-footer-article">
<div class="footer-article-items footer-article__inner">
<div class="footer-article-item"><!-- Previous / next buttons -->
<div class="prev-next-area">
</div></div>
</div>
</footer>
</div>
</div>
<footer class="bd-footer-content">
</footer>
</main>
</div>
</div>
<!-- Scripts loaded after <body> so the DOM is not blocked -->
<script src="../../../_static/scripts/bootstrap.js?digest=e353d410970836974a52"></script>
<script src="../../../_static/scripts/pydata-sphinx-theme.js?digest=e353d410970836974a52"></script>
<!-- Page footer: copyright, Sphinx version, and theme credit -->
<footer class="bd-footer">
<div class="bd-footer__inner bd-page-width">
<div class="footer-items__start">
<div class="footer-item"><p class="copyright">
<!-- Fixed: use the copyright sign (was a literal "@"); lowercase mid-sentence "licensed" -->
Copyright © 2024 The Apache Software Foundation, licensed under the <a href="https://www.apache.org/licenses/LICENSE-2.0">Apache License, Version 2.0</a>.
</p></div>
<div class="footer-item">
<p class="sphinx-version">
Created using <a href="https://www.sphinx-doc.org/">Sphinx</a> 4.5.0.
<!-- Fixed: br is a void element; the trailing slash is dropped per HTML convention -->
<br>
</p>
</div>
</div>
<div class="footer-items__end">
<div class="footer-item"><p class="theme-version">
Built with the <a href="https://pydata-sphinx-theme.readthedocs.io/en/stable/index.html">PyData Sphinx Theme</a> 0.13.3.
</p></div>
</div>
</div>
</footer>
</body>
</html>