<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>
Security | Apache Spark
</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/css/bootstrap.min.css" rel="stylesheet"
integrity="sha384-EVSTQN3/azprG1Anm3QDgpJLIm9Nao0Yz1ztcQTwFspd3yD65VohhpuuCOmLASjC" crossorigin="anonymous">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=DM+Sans:ital,wght@0,400;0,500;0,700;1,400;1,500;1,700&Courier+Prime:wght@400;700&display=swap" rel="stylesheet">
<link href="/css/custom.css" rel="stylesheet">
<!-- Code highlighter CSS -->
<link href="/css/pygments-default.css" rel="stylesheet">
<link rel="icon" href="/favicon.ico" type="image/x-icon">
<!-- Matomo -->
<script>
var _paq = window._paq = window._paq || [];
/* tracker methods like "setCustomDimension" should be called before "trackPageView" */
_paq.push(["disableCookies"]);
_paq.push(['trackPageView']);
_paq.push(['enableLinkTracking']);
(function() {
var u="https://analytics.apache.org/";
_paq.push(['setTrackerUrl', u+'matomo.php']);
_paq.push(['setSiteId', '40']);
var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0];
g.async=true; g.src=u+'matomo.js'; s.parentNode.insertBefore(g,s);
})();
</script>
<!-- End Matomo Code -->
</head>
<body class="global">
<nav class="navbar navbar-expand-lg navbar-dark p-0 px-4" style="background: #1D6890;">
<a class="navbar-brand" href="/">
<img src="/images/spark-logo-rev.svg" alt="" width="141" height="72">
</a>
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarContent"
aria-controls="navbarContent" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse col-md-12 col-lg-auto pt-4" id="navbarContent">
<ul class="navbar-nav me-auto">
<li class="nav-item">
<a class="nav-link active" aria-current="page" href="/downloads.html">Download</a>
</li>
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" href="#" id="libraries" role="button" data-bs-toggle="dropdown"
aria-expanded="false">
Libraries
</a>
<ul class="dropdown-menu" aria-labelledby="libraries">
<li><a class="dropdown-item" href="/sql/">SQL and DataFrames</a></li>
<li><a class="dropdown-item" href="/spark-connect/">Spark Connect</a></li>
<li><a class="dropdown-item" href="/streaming/">Spark Streaming</a></li>
<li><a class="dropdown-item" href="/pandas-on-spark/">pandas on Spark</a></li>
<li><a class="dropdown-item" href="/mllib/">MLlib (machine learning)</a></li>
<li><a class="dropdown-item" href="/graphx/">GraphX (graph)</a></li>
<li>
<hr class="dropdown-divider">
</li>
<li><a class="dropdown-item" href="/third-party-projects.html">Third-Party Projects</a></li>
</ul>
</li>
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" href="#" id="documentation" role="button" data-bs-toggle="dropdown"
aria-expanded="false">
Documentation
</a>
<ul class="dropdown-menu" aria-labelledby="documentation">
<li><a class="dropdown-item" href="/docs/latest/">Latest Release</a></li>
<li><a class="dropdown-item" href="/documentation.html">Older Versions and Other Resources</a></li>
<li><a class="dropdown-item" href="/faq.html">Frequently Asked Questions</a></li>
</ul>
</li>
<li class="nav-item">
<a class="nav-link active" aria-current="page" href="/examples.html">Examples</a>
</li>
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" href="#" id="community" role="button" data-bs-toggle="dropdown"
aria-expanded="false">
Community
</a>
<ul class="dropdown-menu" aria-labelledby="community">
<li><a class="dropdown-item" href="/community.html">Mailing Lists &amp; Resources</a></li>
<li><a class="dropdown-item" href="/contributing.html">Contributing to Spark</a></li>
<li><a class="dropdown-item" href="/improvement-proposals.html">Improvement Proposals (SPIP)</a>
</li>
<li><a class="dropdown-item" href="https://issues.apache.org/jira/browse/SPARK">Issue Tracker</a>
</li>
<li><a class="dropdown-item" href="/powered-by.html">Powered By</a></li>
<li><a class="dropdown-item" href="/committers.html">Project Committers</a></li>
<li><a class="dropdown-item" href="/history.html">Project History</a></li>
</ul>
</li>
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" href="#" id="developers" role="button" data-bs-toggle="dropdown"
aria-expanded="false">
Developers
</a>
<ul class="dropdown-menu" aria-labelledby="developers">
<li><a class="dropdown-item" href="/developer-tools.html">Useful Developer Tools</a></li>
<li><a class="dropdown-item" href="/versioning-policy.html">Versioning Policy</a></li>
<li><a class="dropdown-item" href="/release-process.html">Release Process</a></li>
<li><a class="dropdown-item" href="/security.html">Security</a></li>
</ul>
</li>
</ul>
<ul class="navbar-nav ml-auto">
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" href="#" id="apacheFoundation" role="button"
data-bs-toggle="dropdown" aria-expanded="false">
Apache Software Foundation
</a>
<ul class="dropdown-menu" aria-labelledby="apacheFoundation">
<li><a class="dropdown-item" href="https://www.apache.org/">Apache Homepage</a></li>
<li><a class="dropdown-item" href="https://www.apache.org/licenses/">License</a></li>
<li><a class="dropdown-item"
href="https://www.apache.org/foundation/sponsorship.html">Sponsorship</a></li>
<li><a class="dropdown-item" href="https://www.apache.org/foundation/thanks.html">Thanks</a></li>
<li><a class="dropdown-item" href="https://www.apache.org/security/">Security</a></li>
<li><a class="dropdown-item" href="https://www.apache.org/events/current-event">Event</a></li>
</ul>
</li>
</ul>
</div>
</nav>
<div class="container">
<div class="row mt-4">
<div class="col-12 col-md-9">
<h2>Reporting security issues</h2>
<p>Apache Spark uses the standard process outlined by the <a href="https://www.apache.org/security/">Apache Security Team</a>
for reporting vulnerabilities. Note that vulnerabilities should not be publicly disclosed until the project has
responded.</p>
<p>To report a possible security vulnerability, please email <code class="language-plaintext highlighter-rouge">security@spark.apache.org</code>. This is a
non-public list that will reach the Apache Security team, as well as the Spark PMC.</p>
<h2>Known security issues</h2>
<h3 id="CVE-2023-32007">CVE-2023-32007: Apache Spark shell command injection vulnerability via Spark UI</h3>
<p>This CVE is only an update to <a href="#CVE-2022-33891">CVE-2022-33891</a> to clarify that version 3.1.3 is also
affected. It is otherwise not a new vulnerability. Note that Apache Spark 3.1.x is EOL now.</p>
<h3 id="CVE-2023-22946">CVE-2023-22946: Apache Spark proxy-user privilege escalation from malicious configuration class</h3>
<p>Severity: Medium</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions Affected:</p>
<ul>
<li>Versions prior to 3.4.0</li>
</ul>
<p>Description:</p>
<p>In Apache Spark versions prior to 3.4.0, applications using spark-submit can specify a &#8216;proxy-user&#8217; to run as,
limiting privileges. The application can execute code with the privileges of the submitting user, however, by
providing malicious configuration-related classes on the classpath. This affects architectures relying on
proxy-user, for example those using Apache Livy to manage submitted applications.</p>
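<p>For illustration, a proxy-user submission (the mechanism this CVE concerns) looks roughly like the following
sketch; the master, application JAR, class name, and user name are placeholders:</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code>spark-submit \
  --master yarn \
  --deploy-mode cluster \
  --proxy-user alice \
  --class com.example.App \
  app.jar
</code></pre></div></div>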
<p>This issue is being tracked as <a href="https://issues.apache.org/jira/browse/SPARK-41958">SPARK-41958</a>.</p>
<p>Mitigation:</p>
<ul>
<li>Update to Apache Spark 3.4.0 or later, and ensure that <code class="language-plaintext highlighter-rouge">spark.submit.proxyUser.allowCustomClasspathInClusterMode</code> is set to its default of &#8220;false&#8221;, and is not overridden by submitted applications (see the configuration sketch below).</li>
</ul>
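<p>A minimal <code class="language-plaintext highlighter-rouge">spark-defaults.conf</code> sketch of the mitigation check above (the value shown is the default):</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code># Do not let submitted applications supply their own config classes in cluster mode
spark.submit.proxyUser.allowCustomClasspathInClusterMode   false
</code></pre></div></div>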
<p>Credit:</p>
<ul>
<li>Hideyuki Furue (finder)</li>
<li>Yi Wu (Databricks) (remediation developer)</li>
</ul>
<h3 id="CVE-2022-31777">CVE-2022-31777: Apache Spark XSS vulnerability in log viewer UI Javascript</h3>
<p>Severity: Medium</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions Affected:</p>
<ul>
<li>3.2.1 and earlier</li>
<li>3.3.0</li>
</ul>
<p>Description:</p>
<p>A stored cross-site scripting (XSS) vulnerability in Apache Spark 3.2.1 and earlier, and 3.3.0, allows remote
attackers to execute arbitrary JavaScript in the web browser of a user by including a malicious payload in the
application logs, which are then rendered in the UI.</p>
<p>Mitigation:</p>
<ul>
<li>Upgrade to Spark 3.2.2, or 3.3.1 or later</li>
</ul>
<p>Credit:</p>
<ul>
<li>Florian Walter (Veracode)</li>
</ul>
<h3 id="CVE-2022-33891">CVE-2022-33891: Apache Spark shell command injection vulnerability via Spark UI</h3>
<p>Severity: Important</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions Affected:</p>
<ul>
<li>3.1.3 and earlier (previously, this was marked as fixed in 3.1.3; this change is tracked as <a href="#CVE-2023-32007">CVE-2023-32007</a>)</li>
<li>3.2.0 to 3.2.1</li>
</ul>
<p>Description:</p>
<p>The Apache Spark UI offers the possibility to enable ACLs via the configuration option <code class="language-plaintext highlighter-rouge">spark.acls.enable</code>.
With an authentication filter, this checks whether a user has access permissions to view or modify the application.
If ACLs are enabled, a code path in HttpSecurityFilter can allow someone to perform impersonation by providing an
arbitrary user name. A malicious user might then be able to reach a permission check function that will ultimately
build a Unix shell command based on their input, and execute it. This will result in arbitrary shell command
execution as the user Spark is currently running as.</p>
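<p>For orientation, a vulnerable deployment combined ACLs with an authentication filter on the UI; a sketch of the
two properties involved (the filter class name is purely illustrative):</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code>spark.acls.enable   true
# A servlet filter that authenticates UI users; the class name here is a placeholder
spark.ui.filters    com.example.MyAuthenticationFilter
</code></pre></div></div>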
<p>Mitigation:</p>
<ul>
<li>Update to Spark 3.2.2, or 3.3.0 or later</li>
</ul>
<p>Credit:</p>
<ul>
<li>Kostya Torchinsky (Databricks)</li>
</ul>
<h3 id="CVE-2021-38296">CVE-2021-38296: Apache Spark<span class="tm">&trade;</span> Key Negotiation Vulnerability</h3>
<p>Severity: Medium</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions Affected:</p>
<ul>
<li>Apache Spark 3.1.2 and earlier</li>
</ul>
<p>Description:</p>
<p>Apache Spark supports end-to-end encryption of RPC connections via <code class="language-plaintext highlighter-rouge">spark.authenticate</code> and <code class="language-plaintext highlighter-rouge">spark.network.crypto.enabled</code>.
In versions 3.1.2 and earlier, it uses a bespoke mutual authentication protocol that allows for full encryption key
recovery. After an initial interactive attack, this would allow someone to decrypt plaintext traffic offline.
Note that this does not affect security mechanisms controlled by <code class="language-plaintext highlighter-rouge">spark.authenticate.enableSaslEncryption</code>,
<code class="language-plaintext highlighter-rouge">spark.io.encryption.enabled</code>, <code class="language-plaintext highlighter-rouge">spark.ssl</code>, <code class="language-plaintext highlighter-rouge">spark.ui.strictTransportSecurity</code>.</p>
<p>Mitigation:</p>
<ul>
<li>Update to Spark 3.1.3 or later</li>
</ul>
<p>Credit:</p>
<ul>
<li>Steve Weis (Databricks)</li>
</ul>
<h3 id="CVE-2020-9480">CVE-2020-9480: Apache Spark<span class="tm">&trade;</span> RCE vulnerability in auth-enabled standalone master</h3>
<p>Severity: Important</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions Affected:</p>
<ul>
<li>Apache Spark 2.4.5 and earlier</li>
</ul>
<p>Description:</p>
<p>In Apache Spark 2.4.5 and earlier, a standalone resource manager&#8217;s master may
be configured to require authentication (<code class="language-plaintext highlighter-rouge">spark.authenticate</code>) via a
shared secret. When enabled, however, a specially-crafted RPC to the
master can succeed in starting an application&#8217;s resources on the Spark
cluster, even without the shared key. This can be leveraged to execute
shell commands on the host machine.</p>
<p>This does not affect Spark clusters using other resource managers
(YARN, Mesos, etc.).</p>
<p>Mitigation:</p>
<ul>
<li>Users should update to Spark 2.4.6 or 3.0.0.</li>
<li>Where possible, network access to the cluster machines should be restricted to trusted hosts only (see the sketch below).</li>
</ul>
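<p>One way to restrict access on Linux, for example limiting the standalone master&#8217;s RPC port (7077 by default)
to a trusted subnet; the subnet here is illustrative:</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code># Allow the standalone master RPC port only from a trusted subnet, drop other sources
iptables -A INPUT -p tcp --dport 7077 -s 10.0.0.0/24 -j ACCEPT
iptables -A INPUT -p tcp --dport 7077 -j DROP
</code></pre></div></div>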
<p>Credit:</p>
<ul>
<li>Ayoub Elaassal</li>
</ul>
<h3 id="CVE-2019-10099">CVE-2019-10099: Apache Spark<span class="tm">&trade;</span> unencrypted data on local disk</h3>
<p>Severity: Important</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions affected:</p>
<ul>
<li>All Spark 1.x, Spark 2.0.x, Spark 2.1.x, and 2.2.x versions</li>
<li>Spark 2.3.0 to 2.3.2</li>
</ul>
<p>Description:</p>
<p>Prior to Spark 2.3.3, in certain situations Spark would write user data to local disk unencrypted, even if <code class="language-plaintext highlighter-rouge">spark.io.encryption.enabled=true</code>. This includes cached blocks that are fetched to disk (controlled by <code class="language-plaintext highlighter-rouge">spark.maxRemoteBlockSizeFetchToMem</code>); in SparkR, when using parallelize; in PySpark, when using broadcast and parallelize; and when using Python UDFs.</p>
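<p>For reference, the two properties mentioned above; before 2.3.3, data could reach local disk unencrypted on
these paths even with encryption enabled (the size threshold shown is illustrative):</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code># I/O encryption for data Spark spills or caches on local disk
spark.io.encryption.enabled          true
# Remote blocks larger than this threshold are fetched to disk instead of memory
spark.maxRemoteBlockSizeFetchToMem   200m
</code></pre></div></div>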
<p>Mitigation:</p>
<ul>
<li>1.x, 2.0.x, 2.1.x, 2.2.x, 2.3.x users should upgrade to 2.3.3 or newer, including 2.4.x</li>
</ul>
<p>Credit:</p>
<ul>
<li>This issue was reported by Thomas Graves of NVIDIA.</li>
</ul>
<h3 id="CVE-2018-11760">CVE-2018-11760: Apache Spark<span class="tm">&trade;</span> local privilege escalation vulnerability</h3>
<p>Severity: Important</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions affected:</p>
<ul>
<li>All Spark 1.x, Spark 2.0.x, and Spark 2.1.x versions</li>
<li>Spark 2.2.0 to 2.2.2</li>
<li>Spark 2.3.0 to 2.3.1</li>
</ul>
<p>Description:</p>
<p>When using PySpark, it&#8217;s possible for a different local user
to connect to the Spark application and impersonate the user running
the Spark application. This affects versions 1.x, 2.0.x, 2.1.x, 2.2.0 to 2.2.2, and 2.3.0 to 2.3.1.</p>
<p>Mitigation:</p>
<ul>
<li>1.x, 2.0.x, 2.1.x, and 2.2.x users should upgrade to 2.2.3 or newer</li>
<li>2.3.x users should upgrade to 2.3.2 or newer</li>
<li>Otherwise, affected users should avoid using PySpark in
multi-user environments.</li>
</ul>
<p>Credit:</p>
<ul>
<li>Luca Canali and Jose Carlos Luna Duran, CERN</li>
</ul>
<h3 id="CVE-2018-17190">CVE-2018-17190: Unsecured Apache Spark<span class="tm">&trade;</span> standalone executes user code</h3>
<p>Severity: Low</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions Affected:</p>
<ul>
<li>All versions of Apache Spark</li>
</ul>
<p>Description:</p>
<p>Spark&#8217;s standalone resource manager accepts code to execute on a &#8216;master&#8217; host, which then runs that code on
&#8216;worker&#8217; hosts. The master itself does not, by design, execute user code. A specially-crafted request to the
master can, however, cause the master to execute code too. Note that this does not affect standalone clusters
with authentication enabled. While the master host typically has less outbound access to other resources
than a worker, the execution of code on the master is nevertheless unexpected.</p>
<p>Mitigation:</p>
<p>Enable authentication on any Spark standalone cluster that is not otherwise secured
from unwanted access, for example by network-level restrictions. Use <code class="language-plaintext highlighter-rouge">spark.authenticate</code>
and related security properties described at <a href="https://spark.apache.org/docs/latest/security.html">https://spark.apache.org/docs/latest/security.html</a></p>
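<p>A minimal sketch of that configuration, assuming the same secret is distributed to the master, workers, and
submitting applications (the secret value is a placeholder):</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code>spark.authenticate          true
spark.authenticate.secret   some-long-random-secret
</code></pre></div></div>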
<p>Credit:</p>
<ul>
<li>Andre Protas, Apple Information Security</li>
</ul>
<h3 id="CVE-2018-11804">CVE-2018-11804: Apache Spark<span class="tm">&trade;</span> build/mvn runs zinc, and can expose information from build machines</h3>
<p>Severity: Low</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions Affected</p>
<ul>
<li>2.1.x release branch and earlier</li>
<li>2.2.x release branch before Spark 2.2.3</li>
<li>2.3.x release branch before Spark 2.3.3</li>
</ul>
<p>Description:</p>
<p>Spark&#8217;s Apache Maven-based build includes a convenience script, &#8216;build/mvn&#8217;,
that downloads and runs a zinc server to speed up compilation. This server
will accept connections from external hosts by default. A specially-crafted
request to the zinc server could cause it to reveal information in files
readable to the developer account running the build. Note that this issue
does not affect end users of Spark, only developers building Spark from
source code.</p>
<p>Mitigation:</p>
<ul>
<li>Spark users are not affected, as zinc is only a part of the build process.</li>
<li>Spark developers may simply use a local Maven installation&#8217;s &#8216;mvn&#8217; command to build, and avoid running build/mvn and zinc.</li>
<li>Spark developers building actively-developed branches (2.2.x, 2.3.x, 2.4.x, master) may update their branches to receive mitigations already patched onto the build/mvn script</li>
<li>Spark developers running zinc separately may include &#8220;-server 127.0.0.1&#8221; in the zinc command line, and consider additional flags like &#8220;-idle-timeout 30m&#8221; to achieve similar mitigation (see the sketch below).</li>
</ul>
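<p>A sketch of the last two options above, assuming a local Maven installation and a manually launched zinc
(the zinc launcher path varies by setup):</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code># Build with a local Maven installation instead of build/mvn
mvn -DskipTests clean package

# If running zinc yourself, bind it to localhost and let it stop when idle
zinc -server 127.0.0.1 -idle-timeout 30m
</code></pre></div></div>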
<p>Credit:</p>
<ul>
<li>Andre Protas, Apple Information Security</li>
</ul>
<h3 id="CVE-2018-11770">CVE-2018-11770: Apache Spark<span class="tm">&trade;</span> standalone master, Mesos REST APIs not controlled by authentication</h3>
<p>Severity: Medium</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions Affected:</p>
<ul>
<li>Spark versions from 1.3.0, running standalone master with REST API enabled, or running Mesos master with cluster mode enabled; suggested mitigations resolved the issue as of Spark 2.4.0.</li>
</ul>
<p>Description:</p>
<p>From version 1.3.0 onward, Spark&#8217;s standalone master exposes a REST API for job submission, in addition
to the submission mechanism used by <code class="language-plaintext highlighter-rouge">spark-submit</code>. In standalone, the config property
<code class="language-plaintext highlighter-rouge">spark.authenticate.secret</code> establishes a shared secret for authenticating requests to submit jobs via
<code class="language-plaintext highlighter-rouge">spark-submit</code>. However, the REST API does not use this or any other authentication mechanism, and this is
not adequately documented. As a result, a user could use the REST API to run a driver program without
authenticating, but could not launch executors. This REST API is also used by Mesos, when set up to run in
cluster mode (i.e., when also running <code class="language-plaintext highlighter-rouge">MesosClusterDispatcher</code>), for job submission. Future versions of Spark
will improve documentation on these points, and prohibit setting <code class="language-plaintext highlighter-rouge">spark.authenticate.secret</code> when running
the REST APIs, to make this clear. Future versions will also disable the REST API by default in the
standalone master by changing the default value of <code class="language-plaintext highlighter-rouge">spark.master.rest.enabled</code> to <code class="language-plaintext highlighter-rouge">false</code>.</p>
<p>Mitigation:</p>
<p>For standalone masters, disable the REST API by setting <code class="language-plaintext highlighter-rouge">spark.master.rest.enabled</code> to <code class="language-plaintext highlighter-rouge">false</code> if it is unused,
and/or ensure that all network access to the REST API (port 6066 by default) is restricted to hosts that are
trusted to submit jobs. Mesos users can stop the <code class="language-plaintext highlighter-rouge">MesosClusterDispatcher</code>, though that will prevent them
from running jobs in cluster mode. Alternatively, they can ensure access to the <code class="language-plaintext highlighter-rouge">MesosRestSubmissionServer</code>
(port 7077 by default) is restricted to trusted hosts.</p>
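<p>A minimal sketch of the standalone-master part of the mitigation above, for a deployment that does not use the
REST submission API:</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code># spark-defaults.conf on the standalone master: disable the REST submission server
spark.master.rest.enabled   false
</code></pre></div></div>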
<p>Credit:</p>
<ul>
<li>Imran Rashid, Cloudera</li>
<li>Fengwei Zhang, Alibaba Cloud Security Team</li>
</ul>
<h3 id="CVE-2018-8024">CVE-2018-8024: Apache Spark<span class="tm">&trade;</span> XSS vulnerability in UI</h3>
<p>Severity: Medium</p>
<p>Versions Affected:</p>
<ul>
<li>Spark 2.1.0 through 2.1.2</li>
<li>Spark 2.2.0 through 2.2.1</li>
<li>Spark 2.3.0</li>
</ul>
<p>Description:</p>
<p>In Apache Spark 2.1.0 to 2.1.2, 2.2.0 to 2.2.1, and 2.3.0, it&#8217;s possible for a malicious
user to construct a URL pointing to the job and stage info pages of a Spark cluster&#8217;s UI. If a user can
be tricked into accessing the URL, the attacker can cause script to execute and expose information from
the user&#8217;s view of the Spark UI. While some browsers, like recent versions of Chrome and Safari, are
able to block this type of attack, current versions of Firefox (and possibly others) do not.</p>
<p>Mitigation:</p>
<ul>
<li>2.1.x users should upgrade to 2.1.3 or newer</li>
<li>2.2.x users should upgrade to 2.2.2 or newer</li>
<li>2.3.x users should upgrade to 2.3.1 or newer</li>
</ul>
<p>Credit:</p>
<ul>
<li>Spencer Gietzen, Rhino Security Labs</li>
</ul>
<h3 id="CVE-2018-1334">CVE-2018-1334: Apache Spark<span class="tm">&trade;</span> local privilege escalation vulnerability</h3>
<p>Severity: High</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions affected:</p>
<ul>
<li>Spark versions through 2.1.2</li>
<li>Spark 2.2.0 to 2.2.1</li>
<li>Spark 2.3.0</li>
</ul>
<p>Description:</p>
<p>In Apache Spark up to and including 2.1.2, 2.2.0 to 2.2.1, and 2.3.0, when using PySpark or SparkR,
it&#8217;s possible for a different local user to connect to the Spark application and impersonate the
user running the Spark application.</p>
<p>Mitigation:</p>
<ul>
<li>1.x, 2.0.x, and 2.1.x users should upgrade to 2.1.3 or newer</li>
<li>2.2.x users should upgrade to 2.2.2 or newer</li>
<li>2.3.x users should upgrade to 2.3.1 or newer</li>
<li>Otherwise, affected users should avoid using PySpark and SparkR in multi-user environments.</li>
</ul>
<p>Credit:</p>
<ul>
<li>Nehmé Tohmé, Cloudera, Inc.</li>
</ul>
<h3 id="CVE-2017-12612">CVE-2017-12612 Unsafe deserialization in Apache Spark<span class="tm">&trade;</span> launcher API</h3>
<p>JIRA: <a href="https://issues.apache.org/jira/browse/SPARK-20922">SPARK-20922</a></p>
<p>Severity: Medium</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions Affected:</p>
<ul>
<li>Versions of Apache Spark from 1.6.0 until 2.1.1</li>
</ul>
<p>Description:</p>
<p>In Apache Spark 1.6.0 until 2.1.1, the launcher API performs unsafe
deserialization of data received by its socket. This makes applications
launched programmatically using the launcher API potentially
vulnerable to arbitrary code execution by an attacker with access to any user
account on the local machine. It does not affect apps run by spark-submit or
spark-shell. The attacker would be able to execute code as the user that ran
the Spark application. Users are encouraged to update to version 2.1.2, 2.2.0 or
later.</p>
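<p>For context, &#8220;launched programmatically using the launcher API&#8221; refers to applications started along
these lines (a minimal Java sketch; the paths and class names are placeholders):</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code>import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;

public class LaunchExample {
  public static void main(String[] args) throws Exception {
    // The launcher starts the application and receives its state over a
    // local socket -- the channel whose deserialization this CVE concerns.
    SparkAppHandle handle = new SparkLauncher()
        .setAppResource("/path/to/app.jar")   // placeholder
        .setMainClass("com.example.App")      // placeholder
        .setMaster("local[*]")
        .startApplication();
    System.out.println("Launched, state: " + handle.getState());
  }
}
</code></pre></div></div>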
<p>Mitigation:</p>
<p>Update to Apache Spark 2.1.2, 2.2.0 or later.</p>
<p>Credit:</p>
<ul>
<li>Aditya Sharad, Semmle</li>
</ul>
<h3 id="CVE-2017-7678">CVE-2017-7678 Apache Spark<span class="tm">&trade;</span> XSS web UI MHTML vulnerability</h3>
<p>JIRA: <a href="https://issues.apache.org/jira/browse/SPARK-20393">SPARK-20393</a></p>
<p>Severity: Medium</p>
<p>Vendor: The Apache Software Foundation</p>
<p>Versions Affected:</p>
<ul>
<li>Versions of Apache Spark before 2.1.2, 2.2.0</li>
</ul>
<p>Description:</p>
<p>It is possible for an attacker to take advantage of a user&#8217;s trust in the server to trick them into
visiting a link that points to a shared Spark cluster and submits data including MHTML to the Spark
master, or history server. This data, which could contain a script, would then be reflected back to
the user and could be evaluated and executed by MS Windows-based clients. It is not an attack on Spark
itself, but on the user, who may then execute the script inadvertently when viewing elements of the
Spark web UIs.</p>
<p>Mitigation:</p>
<p>Update to Apache Spark 2.1.2, 2.2.0 or later.</p>
<p>Example:</p>
<p>Request:</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code>GET /app/?appId=Content-Type:%20multipart/related;%20boundary=_AppScan%0d%0a--
_AppScan%0d%0aContent-Location:foo%0d%0aContent-Transfer-
Encoding:base64%0d%0a%0d%0aPGh0bWw%2bPHNjcmlwdD5hbGVydCgiWFNTIik8L3NjcmlwdD48L2h0bWw%2b%0d%0a
HTTP/1.1
</code></pre></div></div>
<p>Excerpt from response:</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code>&lt;div class="row-fluid"&gt;No running application with ID Content-Type: multipart/related;
boundary=_AppScan
--_AppScan
Content-Location:foo
Content-Transfer-Encoding:base64
PGh0bWw+PHNjcmlwdD5hbGVydCgiWFNTIik8L3NjcmlwdD48L2h0bWw+
&lt;/div&gt;
</code></pre></div></div>
<p>Result: In the above payload, the Base64 data decodes as:</p>
<div class="language-plaintext highlighter-rouge"><div class="highlight"><pre class="highlight"><code>&lt;html&gt;&lt;script&gt;alert("XSS")&lt;/script&gt;&lt;/html&gt;
</code></pre></div></div>
<p>Credit:</p>
<ul>
<li>Mike Kasper, Nicholas Marion</li>
<li>IBM z Systems Center for Secure Engineering</li>
</ul>
</div>
<div class="col-12 col-md-3">
<div class="news" style="margin-bottom: 20px;">
<h5>Latest News</h5>
<ul class="list-unstyled">
<li><a href="/news/spark-3-4-3-released.html">Spark 3.4.3 released</a>
<span class="small">(Apr 18, 2024)</span></li>
<li><a href="/news/spark-3-5-1-released.html">Spark 3.5.1 released</a>
<span class="small">(Feb 23, 2024)</span></li>
<li><a href="/news/spark-3-3-4-released.html">Spark 3.3.4 released</a>
<span class="small">(Dec 16, 2023)</span></li>
<li><a href="/news/spark-3-4-2-released.html">Spark 3.4.2 released</a>
<span class="small">(Nov 30, 2023)</span></li>
</ul>
<p class="small" style="text-align: right;"><a href="/news/index.html">Archive</a></p>
</div>
<div style="text-align:center; margin-bottom: 20px;">
<a href="https://www.apache.org/events/current-event.html">
<img src="https://www.apache.org/events/current-event-234x60.png" style="max-width: 100%;"/>
</a>
</div>
<div class="hidden-xs hidden-sm">
<a href="/downloads.html" class="btn btn-cta btn-lg d-grid" style="margin-bottom: 30px;">
Download Spark
</a>
<p style="font-size: 16px; font-weight: 500; color: #555;">
Built-in Libraries:
</p>
<ul class="list-none">
<li><a href="/sql/">SQL and DataFrames</a></li>
<li><a href="/streaming/">Spark Streaming</a></li>
<li><a href="/mllib/">MLlib (machine learning)</a></li>
<li><a href="/graphx/">GraphX (graph)</a></li>
</ul>
<a href="/third-party-projects.html">Third-Party Projects</a>
</div>
</div>
</div>
<footer class="small">
<hr>
Apache Spark, Spark, Apache, the Apache feather logo, and the Apache Spark project logo are either registered
trademarks or trademarks of The Apache Software Foundation in the United States and other countries.
See guidance on use of Apache Spark <a href="/trademarks.html">trademarks</a>.
All other marks mentioned may be trademarks or registered trademarks of their respective owners.
Copyright &copy; 2018 The Apache Software Foundation, Licensed under the
<a href="https://www.apache.org/licenses/">Apache License, Version 2.0</a>.
</footer>
</div>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/js/bootstrap.bundle.min.js"
integrity="sha384-MrcW6ZMFYlzcLA8Nl+NtUVF0sA7MsXsP1UyJoMp4YLEuNSfAP+JcXn/tWtIaxVXM"
crossorigin="anonymous"></script>
<script src="https://code.jquery.com/jquery.js"></script>
<script src="/js/lang-tabs.js"></script>
<script src="/js/downloads.js"></script>
</body>
</html>