<!DOCTYPE html>

<html>
<head>
<meta charset="utf-8" />
<title>Testing PySpark — PySpark 3.5.5 documentation</title>
<link rel="canonical" href="https://spark.apache.org/docs/latest/api/python/getting_started/testing_pyspark.html" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
</head>
<body>
| <div class="section" id="Testing-PySpark"> |
| <h1>Testing PySpark<a class="headerlink" href="#Testing-PySpark" title="Permalink to this headline">¶</a></h1> |
| <p>This guide is a reference for writing robust tests for PySpark code.</p> |
<p>The docs for the PySpark test utils are available in the API Reference section of this documentation. The code for the built-in test utils lives in the Spark repository under <code class="docutils literal notranslate"><span class="pre">python/pyspark/testing</span></code>, and work on the PySpark test framework is tracked on the Apache Spark JIRA board.</p>
| <div class="section" id="Build-a-PySpark-Application"> |
| <h2>Build a PySpark Application<a class="headerlink" href="#Build-a-PySpark-Application" title="Permalink to this headline">¶</a></h2> |
<p>Here is an example of how to start a PySpark application. Feel free to skip to the next section, “Testing your PySpark Application,” if you already have an application you’re ready to test.</p>
| <p>First, start your Spark Session.</p> |
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[3]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.sql</span><span class="w"> </span><span class="kn">import</span> <span class="n">SparkSession</span> |
| <span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.sql.functions</span><span class="w"> </span><span class="kn">import</span> <span class="n">col</span> |
| |
| <span class="c1"># Create a SparkSession</span> |
| <span class="n">spark</span> <span class="o">=</span> <span class="n">SparkSession</span><span class="o">.</span><span class="n">builder</span><span class="o">.</span><span class="n">appName</span><span class="p">(</span><span class="s2">"Testing PySpark Example"</span><span class="p">)</span><span class="o">.</span><span class="n">getOrCreate</span><span class="p">()</span> |
| </pre></div> |
| </div> |
| </div> |
| <p>Next, create a DataFrame.</p> |
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[5]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="n">sample_data</span> <span class="o">=</span> <span class="p">[{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"John D."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">30</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Alice G."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">25</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Bob T."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">35</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Eve A."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">28</span><span class="p">}]</span> |
| |
| <span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">sample_data</span><span class="p">)</span> |
| </pre></div> |
| </div> |
| </div> |
| <p>Now, let’s define and apply a transformation function to our DataFrame.</p> |
| <div class="nbinput docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[7]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.sql.functions</span><span class="w"> </span><span class="kn">import</span> <span class="n">col</span><span class="p">,</span> <span class="n">regexp_replace</span> |
| |
| <span class="c1"># Remove additional spaces in name</span> |
| <span class="k">def</span><span class="w"> </span><span class="nf">remove_extra_spaces</span><span class="p">(</span><span class="n">df</span><span class="p">,</span> <span class="n">column_name</span><span class="p">):</span> |
| <span class="c1"># Remove extra spaces from the specified column</span> |
| <span class="n">df_transformed</span> <span class="o">=</span> <span class="n">df</span><span class="o">.</span><span class="n">withColumn</span><span class="p">(</span><span class="n">column_name</span><span class="p">,</span> <span class="n">regexp_replace</span><span class="p">(</span><span class="n">col</span><span class="p">(</span><span class="n">column_name</span><span class="p">),</span> <span class="s2">"</span><span class="se">\\</span><span class="s2">s+"</span><span class="p">,</span> <span class="s2">" "</span><span class="p">))</span> |
| |
| <span class="k">return</span> <span class="n">df_transformed</span> |
| |
| <span class="n">transformed_df</span> <span class="o">=</span> <span class="n">remove_extra_spaces</span><span class="p">(</span><span class="n">df</span><span class="p">,</span> <span class="s2">"name"</span><span class="p">)</span> |
| |
| <span class="n">transformed_df</span><span class="o">.</span><span class="n">show</span><span class="p">()</span> |
| </pre></div> |
| </div> |
| </div> |
| <div class="nboutput nblast docutils container"> |
| <div class="prompt empty docutils container"> |
| </div> |
| <div class="output_area docutils container"> |
| <div class="highlight"><pre> |
| +---+--------+ |
| |age| name| |
| +---+--------+ |
| | 30| John D.| |
| | 25|Alice G.| |
| | 35| Bob T.| |
| | 28| Eve A.| |
| +---+--------+ |
| |
| </pre></div></div> |
| </div> |
| </div> |
| <div class="section" id="Testing-your-PySpark-Application"> |
| <h2>Testing your PySpark Application<a class="headerlink" href="#Testing-your-PySpark-Application" title="Permalink to this headline">¶</a></h2> |
| <p>Now let’s test our PySpark transformation function.</p> |
<p>One option is to simply eyeball the resulting DataFrame. However, this quickly becomes impractical for large DataFrames or input sizes.</p>
<p>A better way is to write tests. Here are some examples of how we can test our code. The examples below apply to Spark 3.5 and later versions.</p>
<p>Note that these examples are not exhaustive, as there are many other test framework alternatives which you can use instead of <code class="docutils literal notranslate"><span class="pre">unittest</span></code> or <code class="docutils literal notranslate"><span class="pre">pytest</span></code>. The built-in PySpark testing util functions are standalone, meaning they are compatible with any test framework or CI test pipeline.</p>
| <div class="section" id="Option-1:-Using-Only-PySpark-Built-in-Test-Utility-Functions"> |
| <h3>Option 1: Using Only PySpark Built-in Test Utility Functions<a class="headerlink" href="#Option-1:-Using-Only-PySpark-Built-in-Test-Utility-Functions" title="Permalink to this headline">¶</a></h3> |
| <p>For simple ad-hoc validation cases, PySpark testing utils like <code class="docutils literal notranslate"><span class="pre">assertDataFrameEqual</span></code> and <code class="docutils literal notranslate"><span class="pre">assertSchemaEqual</span></code> can be used in a standalone context. You could easily test PySpark code in a notebook session. For example, say you want to assert equality between two DataFrames:</p> |
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[10]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="kn">import</span><span class="w"> </span><span class="nn">pyspark.testing</span> |
| <span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.testing.utils</span><span class="w"> </span><span class="kn">import</span> <span class="n">assertDataFrameEqual</span> |
| |
| <span class="c1"># Example 1</span> |
| <span class="n">df1</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">data</span><span class="o">=</span><span class="p">[(</span><span class="s2">"1"</span><span class="p">,</span> <span class="mi">1000</span><span class="p">),</span> <span class="p">(</span><span class="s2">"2"</span><span class="p">,</span> <span class="mi">3000</span><span class="p">)],</span> <span class="n">schema</span><span class="o">=</span><span class="p">[</span><span class="s2">"id"</span><span class="p">,</span> <span class="s2">"amount"</span><span class="p">])</span> |
| <span class="n">df2</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">data</span><span class="o">=</span><span class="p">[(</span><span class="s2">"1"</span><span class="p">,</span> <span class="mi">1000</span><span class="p">),</span> <span class="p">(</span><span class="s2">"2"</span><span class="p">,</span> <span class="mi">3000</span><span class="p">)],</span> <span class="n">schema</span><span class="o">=</span><span class="p">[</span><span class="s2">"id"</span><span class="p">,</span> <span class="s2">"amount"</span><span class="p">])</span> |
| <span class="n">assertDataFrameEqual</span><span class="p">(</span><span class="n">df1</span><span class="p">,</span> <span class="n">df2</span><span class="p">)</span> <span class="c1"># pass, DataFrames are identical</span> |
| </pre></div> |
| </div> |
| </div> |
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[11]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="c1"># Example 2</span> |
| <span class="n">df1</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">data</span><span class="o">=</span><span class="p">[(</span><span class="s2">"1"</span><span class="p">,</span> <span class="mf">0.1</span><span class="p">),</span> <span class="p">(</span><span class="s2">"2"</span><span class="p">,</span> <span class="mf">3.23</span><span class="p">)],</span> <span class="n">schema</span><span class="o">=</span><span class="p">[</span><span class="s2">"id"</span><span class="p">,</span> <span class="s2">"amount"</span><span class="p">])</span> |
| <span class="n">df2</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">data</span><span class="o">=</span><span class="p">[(</span><span class="s2">"1"</span><span class="p">,</span> <span class="mf">0.109</span><span class="p">),</span> <span class="p">(</span><span class="s2">"2"</span><span class="p">,</span> <span class="mf">3.23</span><span class="p">)],</span> <span class="n">schema</span><span class="o">=</span><span class="p">[</span><span class="s2">"id"</span><span class="p">,</span> <span class="s2">"amount"</span><span class="p">])</span> |
| <span class="n">assertDataFrameEqual</span><span class="p">(</span><span class="n">df1</span><span class="p">,</span> <span class="n">df2</span><span class="p">,</span> <span class="n">rtol</span><span class="o">=</span><span class="mf">1e-1</span><span class="p">)</span> <span class="c1"># pass, DataFrames are approx equal by rtol</span> |
| </pre></div> |
| </div> |
| </div> |
| <p>You can also simply compare two DataFrame schemas:</p> |
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[13]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.testing.utils</span><span class="w"> </span><span class="kn">import</span> <span class="n">assertSchemaEqual</span> |
| <span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.sql.types</span><span class="w"> </span><span class="kn">import</span> <span class="n">StructType</span><span class="p">,</span> <span class="n">StructField</span><span class="p">,</span> <span class="n">ArrayType</span><span class="p">,</span> <span class="n">DoubleType</span> |
| |
| <span class="n">s1</span> <span class="o">=</span> <span class="n">StructType</span><span class="p">([</span><span class="n">StructField</span><span class="p">(</span><span class="s2">"names"</span><span class="p">,</span> <span class="n">ArrayType</span><span class="p">(</span><span class="n">DoubleType</span><span class="p">(),</span> <span class="kc">True</span><span class="p">),</span> <span class="kc">True</span><span class="p">)])</span> |
| <span class="n">s2</span> <span class="o">=</span> <span class="n">StructType</span><span class="p">([</span><span class="n">StructField</span><span class="p">(</span><span class="s2">"names"</span><span class="p">,</span> <span class="n">ArrayType</span><span class="p">(</span><span class="n">DoubleType</span><span class="p">(),</span> <span class="kc">True</span><span class="p">),</span> <span class="kc">True</span><span class="p">)])</span> |
| |
| <span class="n">assertSchemaEqual</span><span class="p">(</span><span class="n">s1</span><span class="p">,</span> <span class="n">s2</span><span class="p">)</span> <span class="c1"># pass, schemas are identical</span> |
| </pre></div> |
| </div> |
| </div> |
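<p>When an assertion fails, these utilities raise an error that describes the mismatch rather than a bare <code class="docutils literal notranslate"><span class="pre">AssertionError</span></code>. As a minimal sketch (the exact message format may vary between versions), you can catch <code class="docutils literal notranslate"><span class="pre">PySparkAssertionError</span></code> to inspect it:</p>
<div class="nbinput nblast docutils container">
<div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre>
from pyspark.errors import PySparkAssertionError
from pyspark.testing.utils import assertDataFrameEqual

# Hypothetical mismatch: the first "amount" value differs between the two DataFrames
df_actual = spark.createDataFrame(data=[("1", 1000), ("2", 3000)], schema=["id", "amount"])
df_expected = spark.createDataFrame(data=[("1", 1001), ("2", 3000)], schema=["id", "amount"])

try:
    assertDataFrameEqual(df_actual, df_expected)
except PySparkAssertionError as e:
    # The error message points at the rows that differ, which speeds up debugging
    print(e)
</pre></div>
</div>
</div>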
| </div> |
| <div class="section" id="Option-2:-Using-Unit-Test"> |
| <h3>Option 2: Using <a class="reference external" href="https://docs.python.org/3/library/unittest.html">Unit Test</a><a class="headerlink" href="#Option-2:-Using-Unit-Test" title="Permalink to this headline">¶</a></h3> |
| <p>For more complex testing scenarios, you may want to use a testing framework.</p> |
<p>One of the most popular testing frameworks is the built-in Python <code class="docutils literal notranslate"><span class="pre">unittest</span></code> library. Let’s walk through how you can use it to write PySpark tests. For more information about the <code class="docutils literal notranslate"><span class="pre">unittest</span></code> library, see here: <a class="reference external" href="https://docs.python.org/3/library/unittest.html">https://docs.python.org/3/library/unittest.html</a>.</p>
<p>First, you will need a Spark session. You can define <code class="docutils literal notranslate"><span class="pre">setUpClass</span></code> and <code class="docutils literal notranslate"><span class="pre">tearDownClass</span></code> methods on a <code class="docutils literal notranslate"><span class="pre">unittest.TestCase</span></code>, marked with Python’s built-in <code class="docutils literal notranslate"><span class="pre">@classmethod</span></code> decorator, to take care of setting up and tearing down a Spark session.</p>
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[15]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="kn">import</span><span class="w"> </span><span class="nn">unittest</span> |
| |
| <span class="k">class</span><span class="w"> </span><span class="nc">PySparkTestCase</span><span class="p">(</span><span class="n">unittest</span><span class="o">.</span><span class="n">TestCase</span><span class="p">):</span> |
| <span class="nd">@classmethod</span> |
| <span class="k">def</span><span class="w"> </span><span class="nf">setUpClass</span><span class="p">(</span><span class="bp">cls</span><span class="p">):</span> |
| <span class="bp">cls</span><span class="o">.</span><span class="n">spark</span> <span class="o">=</span> <span class="n">SparkSession</span><span class="o">.</span><span class="n">builder</span><span class="o">.</span><span class="n">appName</span><span class="p">(</span><span class="s2">"Testing PySpark Example"</span><span class="p">)</span><span class="o">.</span><span class="n">getOrCreate</span><span class="p">()</span> |
| |
| |
| <span class="nd">@classmethod</span> |
| <span class="k">def</span><span class="w"> </span><span class="nf">tearDownClass</span><span class="p">(</span><span class="bp">cls</span><span class="p">):</span> |
| <span class="bp">cls</span><span class="o">.</span><span class="n">spark</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span> |
| </pre></div> |
| </div> |
| </div> |
| <p>Now let’s write a <code class="docutils literal notranslate"><span class="pre">unittest</span></code> class.</p> |
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[17]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.testing.utils</span><span class="w"> </span><span class="kn">import</span> <span class="n">assertDataFrameEqual</span> |
| |
| <span class="k">class</span><span class="w"> </span><span class="nc">TestTranformation</span><span class="p">(</span><span class="n">PySparkTestCase</span><span class="p">):</span> |
| <span class="k">def</span><span class="w"> </span><span class="nf">test_single_space</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span> |
| <span class="n">sample_data</span> <span class="o">=</span> <span class="p">[{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"John D."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">30</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Alice G."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">25</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Bob T."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">35</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Eve A."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">28</span><span class="p">}]</span> |
| |
| <span class="c1"># Create a Spark DataFrame</span> |
| <span class="n">original_df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">sample_data</span><span class="p">)</span> |
| |
| <span class="c1"># Apply the transformation function from before</span> |
| <span class="n">transformed_df</span> <span class="o">=</span> <span class="n">remove_extra_spaces</span><span class="p">(</span><span class="n">original_df</span><span class="p">,</span> <span class="s2">"name"</span><span class="p">)</span> |
| |
| <span class="n">expected_data</span> <span class="o">=</span> <span class="p">[{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"John D."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">30</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Alice G."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">25</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Bob T."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">35</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Eve A."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">28</span><span class="p">}]</span> |
| |
| <span class="n">expected_df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">expected_data</span><span class="p">)</span> |
| |
| <span class="n">assertDataFrameEqual</span><span class="p">(</span><span class="n">transformed_df</span><span class="p">,</span> <span class="n">expected_df</span><span class="p">)</span> |
| |
| </pre></div> |
| </div> |
| </div> |
| <p>When run, <code class="docutils literal notranslate"><span class="pre">unittest</span></code> will pick up all functions with a name beginning with “test.”</p> |
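<p>In a standalone test file you would typically also add the usual <code class="docutils literal notranslate"><span class="pre">unittest</span></code> entry point at the bottom of the file, so the tests can be run directly or picked up by test discovery. A minimal sketch:</p>
<div class="nbinput nblast docutils container">
<div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre>
# Standard unittest entry point; lets you execute the file directly
# or discover it with `python -m unittest`.
if __name__ == "__main__":
    unittest.main()
</pre></div>
</div>
</div>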
| </div> |
| <div class="section" id="Option-3:-Using-Pytest"> |
| <h3>Option 3: Using <a class="reference external" href="https://docs.pytest.org/en/7.1.x/contents.html">Pytest</a><a class="headerlink" href="#Option-3:-Using-Pytest" title="Permalink to this headline">¶</a></h3> |
| <p>We can also write our tests with <code class="docutils literal notranslate"><span class="pre">pytest</span></code>, which is one of the most popular Python testing frameworks. For more information about <code class="docutils literal notranslate"><span class="pre">pytest</span></code>, see the docs here: <a class="reference external" href="https://docs.pytest.org/en/7.1.x/contents.html">https://docs.pytest.org/en/7.1.x/contents.html</a>.</p> |
<p>Using a <code class="docutils literal notranslate"><span class="pre">pytest</span></code> fixture allows us to share a Spark session across tests, tearing it down when the tests are complete.</p>
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[20]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="kn">import</span><span class="w"> </span><span class="nn">pytest</span> |
| |
| <span class="nd">@pytest</span><span class="o">.</span><span class="n">fixture</span> |
| <span class="k">def</span><span class="w"> </span><span class="nf">spark_fixture</span><span class="p">():</span> |
| <span class="n">spark</span> <span class="o">=</span> <span class="n">SparkSession</span><span class="o">.</span><span class="n">builder</span><span class="o">.</span><span class="n">appName</span><span class="p">(</span><span class="s2">"Testing PySpark Example"</span><span class="p">)</span><span class="o">.</span><span class="n">getOrCreate</span><span class="p">()</span> |
| <span class="k">yield</span> <span class="n">spark</span> |
| </pre></div> |
| </div> |
| </div> |
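<p>If your tests span several files, one common arrangement (not part of the original example) is to move the fixture into a <code class="docutils literal notranslate"><span class="pre">conftest.py</span></code> and give it <code class="docutils literal notranslate"><span class="pre">scope="session"</span></code>, so a single Spark session is shared by the whole test run and stopped once at the end. A minimal sketch, assuming that file layout:</p>
<div class="nbinput nblast docutils container">
<div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre>
# conftest.py (hypothetical file) -- pytest discovers fixtures defined here automatically
import pytest

from pyspark.sql import SparkSession

@pytest.fixture(scope="session")
def spark_fixture():
    spark = SparkSession.builder.appName("Testing PySpark Example").getOrCreate()
    yield spark
    # Stop the session once after the entire test run
    spark.stop()
</pre></div>
</div>
</div>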
| <p>We can then define our tests like this:</p> |
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[22]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="kn">import</span><span class="w"> </span><span class="nn">pytest</span> |
| <span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.testing.utils</span><span class="w"> </span><span class="kn">import</span> <span class="n">assertDataFrameEqual</span> |
| |
| <span class="k">def</span><span class="w"> </span><span class="nf">test_single_space</span><span class="p">(</span><span class="n">spark_fixture</span><span class="p">):</span> |
| <span class="n">sample_data</span> <span class="o">=</span> <span class="p">[{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"John D."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">30</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Alice G."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">25</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Bob T."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">35</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Eve A."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">28</span><span class="p">}]</span> |
| |
| <span class="c1"># Create a Spark DataFrame</span> |
| <span class="n">original_df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">sample_data</span><span class="p">)</span> |
| |
| <span class="c1"># Apply the transformation function from before</span> |
| <span class="n">transformed_df</span> <span class="o">=</span> <span class="n">remove_extra_spaces</span><span class="p">(</span><span class="n">original_df</span><span class="p">,</span> <span class="s2">"name"</span><span class="p">)</span> |
| |
| <span class="n">expected_data</span> <span class="o">=</span> <span class="p">[{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"John D."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">30</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Alice G."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">25</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Bob T."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">35</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Eve A."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">28</span><span class="p">}]</span> |
| |
| <span class="n">expected_df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">expected_data</span><span class="p">)</span> |
| |
| <span class="n">assertDataFrameEqual</span><span class="p">(</span><span class="n">transformed_df</span><span class="p">,</span> <span class="n">expected_df</span><span class="p">)</span> |
| </pre></div> |
| </div> |
| </div> |
| <p>When you run your test file with the <code class="docutils literal notranslate"><span class="pre">pytest</span></code> command, it will pick up all functions that have their name beginning with “test.”</p> |
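<p>From a terminal this usually just means running the <code class="docutils literal notranslate"><span class="pre">pytest</span></code> command in the directory that contains your test files. If you prefer to stay inside a notebook or script, you can trigger the same collection programmatically. A sketch, assuming the test above has been saved to a file named <code class="docutils literal notranslate"><span class="pre">test_spark.py</span></code> (hypothetical name):</p>
<div class="nbinput nblast docutils container">
<div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre>
import pytest

# Programmatic equivalent of running `pytest -v test_spark.py` from the shell;
# the file name is hypothetical -- point it at wherever your tests live.
exit_code = pytest.main(["-v", "test_spark.py"])
</pre></div>
</div>
</div>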
| </div> |
| </div> |
| <div class="section" id="Putting-It-All-Together!"> |
| <h2>Putting It All Together!<a class="headerlink" href="#Putting-It-All-Together!" title="Permalink to this headline">¶</a></h2> |
<p>Let’s see all the steps together, in a <code class="docutils literal notranslate"><span class="pre">unittest</span></code> example.</p>
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[25]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="c1"># pkg/etl.py</span> |
| <span class="kn">import</span><span class="w"> </span><span class="nn">unittest</span> |
| |
| <span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.sql</span><span class="w"> </span><span class="kn">import</span> <span class="n">SparkSession</span> |
| <span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.sql.functions</span><span class="w"> </span><span class="kn">import</span> <span class="n">col</span> |
| <span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.sql.functions</span><span class="w"> </span><span class="kn">import</span> <span class="n">regexp_replace</span> |
| <span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.testing.utils</span><span class="w"> </span><span class="kn">import</span> <span class="n">assertDataFrameEqual</span> |
| |
| <span class="c1"># Create a SparkSession</span> |
| <span class="n">spark</span> <span class="o">=</span> <span class="n">SparkSession</span><span class="o">.</span><span class="n">builder</span><span class="o">.</span><span class="n">appName</span><span class="p">(</span><span class="s2">"Sample PySpark ETL"</span><span class="p">)</span><span class="o">.</span><span class="n">getOrCreate</span><span class="p">()</span> |
| |
| <span class="n">sample_data</span> <span class="o">=</span> <span class="p">[{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"John D."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">30</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Alice G."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">25</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Bob T."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">35</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Eve A."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">28</span><span class="p">}]</span> |
| |
| <span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">sample_data</span><span class="p">)</span> |
| |
| <span class="c1"># Define DataFrame transformation function</span> |
| <span class="k">def</span><span class="w"> </span><span class="nf">remove_extra_spaces</span><span class="p">(</span><span class="n">df</span><span class="p">,</span> <span class="n">column_name</span><span class="p">):</span> |
| <span class="c1"># Remove extra spaces from the specified column using regexp_replace</span> |
| <span class="n">df_transformed</span> <span class="o">=</span> <span class="n">df</span><span class="o">.</span><span class="n">withColumn</span><span class="p">(</span><span class="n">column_name</span><span class="p">,</span> <span class="n">regexp_replace</span><span class="p">(</span><span class="n">col</span><span class="p">(</span><span class="n">column_name</span><span class="p">),</span> <span class="s2">"</span><span class="se">\\</span><span class="s2">s+"</span><span class="p">,</span> <span class="s2">" "</span><span class="p">))</span> |
| |
| <span class="k">return</span> <span class="n">df_transformed</span> |
| </pre></div> |
| </div> |
| </div> |
| <div class="nbinput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[26]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="c1"># pkg/test_etl.py</span> |
| <span class="kn">import</span><span class="w"> </span><span class="nn">unittest</span> |
| |
| <span class="kn">from</span><span class="w"> </span><span class="nn">pyspark.sql</span><span class="w"> </span><span class="kn">import</span> <span class="n">SparkSession</span> |
| |
| <span class="c1"># Define unit test base class</span> |
| <span class="k">class</span><span class="w"> </span><span class="nc">PySparkTestCase</span><span class="p">(</span><span class="n">unittest</span><span class="o">.</span><span class="n">TestCase</span><span class="p">):</span> |
| <span class="nd">@classmethod</span> |
| <span class="k">def</span><span class="w"> </span><span class="nf">setUpClass</span><span class="p">(</span><span class="bp">cls</span><span class="p">):</span> |
| <span class="bp">cls</span><span class="o">.</span><span class="n">spark</span> <span class="o">=</span> <span class="n">SparkSession</span><span class="o">.</span><span class="n">builder</span><span class="o">.</span><span class="n">appName</span><span class="p">(</span><span class="s2">"Sample PySpark ETL"</span><span class="p">)</span><span class="o">.</span><span class="n">getOrCreate</span><span class="p">()</span> |
| |
| <span class="nd">@classmethod</span> |
| <span class="k">def</span><span class="w"> </span><span class="nf">tearDownClass</span><span class="p">(</span><span class="bp">cls</span><span class="p">):</span> |
| <span class="bp">cls</span><span class="o">.</span><span class="n">spark</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span> |
| |
| <span class="c1"># Define unit test</span> |
| <span class="k">class</span><span class="w"> </span><span class="nc">TestTranformation</span><span class="p">(</span><span class="n">PySparkTestCase</span><span class="p">):</span> |
| <span class="k">def</span><span class="w"> </span><span class="nf">test_single_space</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span> |
| <span class="n">sample_data</span> <span class="o">=</span> <span class="p">[{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"John D."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">30</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Alice G."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">25</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Bob T."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">35</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Eve A."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">28</span><span class="p">}]</span> |
| |
| <span class="c1"># Create a Spark DataFrame</span> |
| <span class="n">original_df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">sample_data</span><span class="p">)</span> |
| |
| <span class="c1"># Apply the transformation function from before</span> |
| <span class="n">transformed_df</span> <span class="o">=</span> <span class="n">remove_extra_spaces</span><span class="p">(</span><span class="n">original_df</span><span class="p">,</span> <span class="s2">"name"</span><span class="p">)</span> |
| |
| <span class="n">expected_data</span> <span class="o">=</span> <span class="p">[{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"John D."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">30</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Alice G."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">25</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Bob T."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">35</span><span class="p">},</span> |
| <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span> <span class="s2">"Eve A."</span><span class="p">,</span> <span class="s2">"age"</span><span class="p">:</span> <span class="mi">28</span><span class="p">}]</span> |
| |
| <span class="n">expected_df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">expected_data</span><span class="p">)</span> |
| |
| <span class="n">assertDataFrameEqual</span><span class="p">(</span><span class="n">transformed_df</span><span class="p">,</span> <span class="n">expected_df</span><span class="p">)</span> |
| </pre></div> |
| </div> |
| </div> |
| <div class="nbinput docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[27]: |
| </pre></div> |
| </div> |
| <div class="input_area highlight-ipython3 notranslate"><div class="highlight"><pre> |
| <span></span><span class="n">unittest</span><span class="o">.</span><span class="n">main</span><span class="p">(</span><span class="n">argv</span><span class="o">=</span><span class="p">[</span><span class="s1">''</span><span class="p">],</span> <span class="n">verbosity</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">exit</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span> |
| </pre></div> |
| </div> |
| </div> |
| <div class="nboutput docutils container"> |
| <div class="prompt empty docutils container"> |
| </div> |
| <div class="output_area docutils container"> |
| <div class="highlight"><pre> |
| Ran 1 test in 1.734s |
| |
| OK |
| </pre></div></div> |
| </div> |
| <div class="nboutput nblast docutils container"> |
| <div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[27]: |
| </pre></div> |
| </div> |
| <div class="output_area docutils container"> |
| <div class="highlight"><pre> |
&lt;unittest.main.TestProgram at 0x174539db0&gt;
| </pre></div></div> |
| </div> |
| </div> |
| </div> |
| </body> |
| </html> |