blob: c43c78e8a4c2e27ea21a9e9cf189443834f8d89a [file] [log] [blame]
<!DOCTYPE html>
<!--[if lt IE 7]> <html class="no-js lt-ie9 lt-ie8 lt-ie7"> <![endif]-->
<!--[if IE 7]> <html class="no-js lt-ie9 lt-ie8"> <![endif]-->
<!--[if IE 8]> <html class="no-js lt-ie9"> <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js"> <!--<![endif]-->
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Migration Guide: Spark Core - Spark 3.5.0 Documentation</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/css/bootstrap.min.css" rel="stylesheet"
integrity="sha384-EVSTQN3/azprG1Anm3QDgpJLIm9Nao0Yz1ztcQTwFspd3yD65VohhpuuCOmLASjC" crossorigin="anonymous">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=DM+Sans:ital,wght@0,400;0,500;0,700;1,400;1,500;1,700&Courier+Prime:wght@400;700&display=swap" rel="stylesheet">
<link href="css/custom.css" rel="stylesheet">
<script src="js/vendor/modernizr-2.6.1-respond-1.1.0.min.js"></script>
<link rel="stylesheet" href="css/pygments-default.css">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
<link rel="stylesheet" href="css/docsearch.css">
</head>
<body class="global">
<!--[if lt IE 7]>
<p class="chromeframe">You are using an outdated browser. <a href="https://browsehappy.com/">Upgrade your browser today</a> or <a href="http://www.google.com/chromeframe/?redirect=true">install Google Chrome Frame</a> to better experience this site.</p>
<![endif]-->
<!-- This code is taken from http://twitter.github.com/bootstrap/examples/hero.html -->
<nav class="navbar navbar-expand-lg navbar-dark p-0 px-4 fixed-top" style="background: #1d6890;" id="topbar">
<!-- Bootstrap 5 namespaces its data API as data-bs-* (the bare data-toggle /
     data-target attributes are Bootstrap 4 syntax and are ignored by the
     bundled bootstrap@5.0.2). The data-bs-* attributes are added so the
     toggler and dropdowns work; the legacy attributes are kept alongside in
     case custom JS (e.g. js/main.js) still selects on them. -->
<div class="navbar-brand"><a href="index.html">
<img src="img/spark-logo-rev.svg" width="141" height="72" alt="Apache Spark"></a><span class="version">3.5.0</span>
</div>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-bs-toggle="collapse"
data-target="#navbarCollapse" data-bs-target="#navbarCollapse" aria-controls="navbarCollapse"
aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarCollapse">
<ul class="navbar-nav me-auto">
<li class="nav-item"><a href="index.html" class="nav-link">Overview</a></li>
<li class="nav-item dropdown">
<a href="#" class="nav-link dropdown-toggle" id="navbarQuickStart" role="button" data-toggle="dropdown" data-bs-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Programming Guides</a>
<div class="dropdown-menu" aria-labelledby="navbarQuickStart">
<a class="dropdown-item" href="quick-start.html">Quick Start</a>
<a class="dropdown-item" href="rdd-programming-guide.html">RDDs, Accumulators, Broadcasts Vars</a>
<a class="dropdown-item" href="sql-programming-guide.html">SQL, DataFrames, and Datasets</a>
<a class="dropdown-item" href="structured-streaming-programming-guide.html">Structured Streaming</a>
<a class="dropdown-item" href="streaming-programming-guide.html">Spark Streaming (DStreams)</a>
<a class="dropdown-item" href="ml-guide.html">MLlib (Machine Learning)</a>
<a class="dropdown-item" href="graphx-programming-guide.html">GraphX (Graph Processing)</a>
<a class="dropdown-item" href="sparkr.html">SparkR (R on Spark)</a>
<a class="dropdown-item" href="api/python/getting_started/index.html">PySpark (Python on Spark)</a>
</div>
</li>
<li class="nav-item dropdown">
<a href="#" class="nav-link dropdown-toggle" id="navbarAPIDocs" role="button" data-toggle="dropdown" data-bs-toggle="dropdown" aria-haspopup="true" aria-expanded="false">API Docs</a>
<div class="dropdown-menu" aria-labelledby="navbarAPIDocs">
<a class="dropdown-item" href="api/scala/org/apache/spark/index.html">Scala</a>
<a class="dropdown-item" href="api/java/index.html">Java</a>
<a class="dropdown-item" href="api/python/index.html">Python</a>
<a class="dropdown-item" href="api/R/index.html">R</a>
<a class="dropdown-item" href="api/sql/index.html">SQL, Built-in Functions</a>
</div>
</li>
<li class="nav-item dropdown">
<a href="#" class="nav-link dropdown-toggle" id="navbarDeploying" role="button" data-toggle="dropdown" data-bs-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Deploying</a>
<div class="dropdown-menu" aria-labelledby="navbarDeploying">
<a class="dropdown-item" href="cluster-overview.html">Overview</a>
<a class="dropdown-item" href="submitting-applications.html">Submitting Applications</a>
<div class="dropdown-divider"></div>
<a class="dropdown-item" href="spark-standalone.html">Spark Standalone</a>
<a class="dropdown-item" href="running-on-mesos.html">Mesos</a>
<a class="dropdown-item" href="running-on-yarn.html">YARN</a>
<a class="dropdown-item" href="running-on-kubernetes.html">Kubernetes</a>
</div>
</li>
<li class="nav-item dropdown">
<a href="#" class="nav-link dropdown-toggle" id="navbarMore" role="button" data-toggle="dropdown" data-bs-toggle="dropdown" aria-haspopup="true" aria-expanded="false">More</a>
<div class="dropdown-menu" aria-labelledby="navbarMore">
<a class="dropdown-item" href="configuration.html">Configuration</a>
<a class="dropdown-item" href="monitoring.html">Monitoring</a>
<a class="dropdown-item" href="tuning.html">Tuning Guide</a>
<a class="dropdown-item" href="job-scheduling.html">Job Scheduling</a>
<a class="dropdown-item" href="security.html">Security</a>
<a class="dropdown-item" href="hardware-provisioning.html">Hardware Provisioning</a>
<a class="dropdown-item" href="migration-guide.html">Migration Guide</a>
<div class="dropdown-divider"></div>
<a class="dropdown-item" href="building-spark.html">Building Spark</a>
<a class="dropdown-item" href="https://spark.apache.org/contributing.html">Contributing to Spark</a>
<a class="dropdown-item" href="https://spark.apache.org/third-party-projects.html">Third Party Projects</a>
</div>
</li>
<li class="nav-item">
<!-- aria-label gives the search box an accessible name; placeholder alone is
     not announced reliably by assistive technology. -->
<input type="text" id="docsearch-input" placeholder="Search the docs…" aria-label="Search the docs">
</li>
</ul>
<!--<span class="navbar-text navbar-right"><span class="version-text">v3.5.0</span></span>-->
</div>
</nav>
<div class="container">
<div class="content mr-3" id="content">
<h1 class="title">Migration Guide: Spark Core</h1>
<ul id="markdown-toc">
<li><a href="#upgrading-from-core-33-to-34" id="markdown-toc-upgrading-from-core-33-to-34">Upgrading from Core 3.3 to 3.4</a></li>
<li><a href="#upgrading-from-core-32-to-33" id="markdown-toc-upgrading-from-core-32-to-33">Upgrading from Core 3.2 to 3.3</a></li>
<li><a href="#upgrading-from-core-31-to-32" id="markdown-toc-upgrading-from-core-31-to-32">Upgrading from Core 3.1 to 3.2</a></li>
<li><a href="#upgrading-from-core-30-to-31" id="markdown-toc-upgrading-from-core-30-to-31">Upgrading from Core 3.0 to 3.1</a></li>
<li><a href="#upgrading-from-core-24-to-30" id="markdown-toc-upgrading-from-core-24-to-30">Upgrading from Core 2.4 to 3.0</a></li>
</ul>
<h2 id="upgrading-from-core-33-to-34">Upgrading from Core 3.3 to 3.4</h2>
<ul>
<li>
<p>Since Spark 3.4, Spark driver will own <code class="language-plaintext highlighter-rouge">PersistentVolumeClaim</code>s and try to reuse if they are not assigned to live executors. To restore the behavior before Spark 3.4, you can set <code class="language-plaintext highlighter-rouge">spark.kubernetes.driver.ownPersistentVolumeClaim</code> to <code class="language-plaintext highlighter-rouge">false</code> and <code class="language-plaintext highlighter-rouge">spark.kubernetes.driver.reusePersistentVolumeClaim</code> to <code class="language-plaintext highlighter-rouge">false</code>.</p>
</li>
<li>
<p>Since Spark 3.4, Spark driver will track shuffle data when dynamic allocation is enabled without shuffle service. To restore the behavior before Spark 3.4, you can set <code class="language-plaintext highlighter-rouge">spark.dynamicAllocation.shuffleTracking.enabled</code> to <code class="language-plaintext highlighter-rouge">false</code>.</p>
</li>
<li>
<p>Since Spark 3.4, Spark will try to decommission cached RDD and shuffle blocks if both <code class="language-plaintext highlighter-rouge">spark.decommission.enabled</code> and <code class="language-plaintext highlighter-rouge">spark.storage.decommission.enabled</code> are true. To restore the behavior before Spark 3.4, you can set both <code class="language-plaintext highlighter-rouge">spark.storage.decommission.rddBlocks.enabled</code> and <code class="language-plaintext highlighter-rouge">spark.storage.decommission.shuffleBlocks.enabled</code> to <code class="language-plaintext highlighter-rouge">false</code>.</p>
</li>
<li>
<p>Since Spark 3.4, Spark will use RocksDB store if <code class="language-plaintext highlighter-rouge">spark.history.store.hybridStore.enabled</code> is true. To restore the behavior before Spark 3.4, you can set <code class="language-plaintext highlighter-rouge">spark.history.store.hybridStore.diskBackend</code> to <code class="language-plaintext highlighter-rouge">LEVELDB</code>.</p>
</li>
</ul>
<h2 id="upgrading-from-core-32-to-33">Upgrading from Core 3.2 to 3.3</h2>
<ul>
<li>Since Spark 3.3, Spark migrates its log4j dependency from 1.x to 2.x because log4j 1.x has reached end of life and is no longer supported by the community. Vulnerabilities reported after August 2015 against log4j 1.x were not checked and will not be fixed. Users should rewrite original log4j properties files using log4j2 syntax (XML, JSON, YAML, or properties format). Spark rewrites the <code class="language-plaintext highlighter-rouge">conf/log4j.properties.template</code> which is included in Spark distribution, to <code class="language-plaintext highlighter-rouge">conf/log4j2.properties.template</code> with log4j2 properties format.</li>
</ul>
<h2 id="upgrading-from-core-31-to-32">Upgrading from Core 3.1 to 3.2</h2>
<ul>
<li>
<p>Since Spark 3.2, <code class="language-plaintext highlighter-rouge">spark.scheduler.allocation.file</code> supports reading remote files using the Hadoop filesystem, which means that if the path has no scheme, Spark will respect the Hadoop configuration to read it. To restore the behavior before Spark 3.2, you can specify the local scheme for <code class="language-plaintext highlighter-rouge">spark.scheduler.allocation.file</code> e.g. <code class="language-plaintext highlighter-rouge">file:///path/to/file</code>.</p>
</li>
<li>
<p>Since Spark 3.2, <code class="language-plaintext highlighter-rouge">spark.hadoopRDD.ignoreEmptySplits</code> is set to <code class="language-plaintext highlighter-rouge">true</code> by default which means Spark will not create empty partitions for empty input splits. To restore the behavior before Spark 3.2, you can set <code class="language-plaintext highlighter-rouge">spark.hadoopRDD.ignoreEmptySplits</code> to <code class="language-plaintext highlighter-rouge">false</code>.</p>
</li>
<li>
<p>Since Spark 3.2, <code class="language-plaintext highlighter-rouge">spark.eventLog.compression.codec</code> is set to <code class="language-plaintext highlighter-rouge">zstd</code> by default which means Spark will not fallback to use <code class="language-plaintext highlighter-rouge">spark.io.compression.codec</code> anymore.</p>
</li>
<li>
<p>Since Spark 3.2, <code class="language-plaintext highlighter-rouge">spark.storage.replication.proactive</code> is enabled by default which means Spark tries to replenish in case of the loss of cached RDD block replicas due to executor failures. To restore the behavior before Spark 3.2, you can set <code class="language-plaintext highlighter-rouge">spark.storage.replication.proactive</code> to <code class="language-plaintext highlighter-rouge">false</code>.</p>
</li>
<li>
<p>In Spark 3.2, <code class="language-plaintext highlighter-rouge">spark.launcher.childConectionTimeout</code> is deprecated (typo) though still works. Use <code class="language-plaintext highlighter-rouge">spark.launcher.childConnectionTimeout</code> instead.</p>
</li>
<li>
<p>In Spark 3.2, support for Apache Mesos as a resource manager is deprecated and will be removed in a future version.</p>
</li>
<li>
<p>In Spark 3.2, Spark will delete K8s driver service resource when the application terminates by itself. To restore the behavior before Spark 3.2, you can set <code class="language-plaintext highlighter-rouge">spark.kubernetes.driver.service.deleteOnTermination</code> to <code class="language-plaintext highlighter-rouge">false</code>.</p>
</li>
</ul>
<h2 id="upgrading-from-core-30-to-31">Upgrading from Core 3.0 to 3.1</h2>
<ul>
<li>
<p>In Spark 3.0 and below, <code class="language-plaintext highlighter-rouge">SparkContext</code> can be created in executors. Since Spark 3.1, an exception will be thrown when creating <code class="language-plaintext highlighter-rouge">SparkContext</code> in executors. You can allow it by setting the configuration <code class="language-plaintext highlighter-rouge">spark.executor.allowSparkContext</code> when creating <code class="language-plaintext highlighter-rouge">SparkContext</code> in executors.</p>
</li>
<li>
<p>In Spark 3.0 and below, Spark propagated the Hadoop classpath from <code class="language-plaintext highlighter-rouge">yarn.application.classpath</code> and <code class="language-plaintext highlighter-rouge">mapreduce.application.classpath</code> into the Spark application submitted to YARN when the Spark distribution was bundled with the built-in Hadoop. Since Spark 3.1, it does not propagate anymore when the Spark distribution is bundled with the built-in Hadoop, in order to prevent failures arising from different transitive dependencies picked up from the Hadoop cluster such as Guava and Jackson. To restore the behavior before Spark 3.1, you can set <code class="language-plaintext highlighter-rouge">spark.yarn.populateHadoopClasspath</code> to <code class="language-plaintext highlighter-rouge">true</code>.</p>
</li>
</ul>
<h2 id="upgrading-from-core-24-to-30">Upgrading from Core 2.4 to 3.0</h2>
<ul>
<li>
<p>The <code class="language-plaintext highlighter-rouge">org.apache.spark.ExecutorPlugin</code> interface and related configuration has been replaced with
<code class="language-plaintext highlighter-rouge">org.apache.spark.api.plugin.SparkPlugin</code>, which adds new functionality. Plugins using the old
interface must be modified to extend the new interfaces. Check the
<a href="monitoring.html">Monitoring</a> guide for more details.</p>
</li>
<li>
<p>Deprecated method <code class="language-plaintext highlighter-rouge">TaskContext.isRunningLocally</code> has been removed. Local execution was removed and it always has returned <code class="language-plaintext highlighter-rouge">false</code>.</p>
</li>
<li>
<p>Deprecated methods <code class="language-plaintext highlighter-rouge">shuffleBytesWritten</code>, <code class="language-plaintext highlighter-rouge">shuffleWriteTime</code> and <code class="language-plaintext highlighter-rouge">shuffleRecordsWritten</code> in <code class="language-plaintext highlighter-rouge">ShuffleWriteMetrics</code> have been removed. Instead, use <code class="language-plaintext highlighter-rouge">bytesWritten</code>, <code class="language-plaintext highlighter-rouge">writeTime</code> and <code class="language-plaintext highlighter-rouge">recordsWritten</code> respectively.</p>
</li>
<li>
<p>Deprecated method <code class="language-plaintext highlighter-rouge">AccumulableInfo.apply</code> has been removed because creating <code class="language-plaintext highlighter-rouge">AccumulableInfo</code> is disallowed.</p>
</li>
<li>
<p>Deprecated accumulator v1 APIs have been removed; please use the v2 APIs instead.</p>
</li>
<li>
<p>Event log file will be written as UTF-8 encoding, and Spark History Server will replay event log files as UTF-8 encoding. Previously Spark wrote the event log file as default charset of driver JVM process, so Spark History Server of Spark 2.x is needed to read the old event log files in case of incompatible encoding.</p>
</li>
<li>
<p>A new protocol for fetching shuffle blocks is used. It&#8217;s recommended that external shuffle services be upgraded when running Spark 3.0 apps. You can still use old external shuffle services by setting the configuration <code class="language-plaintext highlighter-rouge">spark.shuffle.useOldFetchProtocol</code> to <code class="language-plaintext highlighter-rouge">true</code>. Otherwise, Spark may run into errors with messages like <code class="language-plaintext highlighter-rouge">IllegalArgumentException: Unexpected message type: &lt;number&gt;</code>.</p>
</li>
<li>
<p><code class="language-plaintext highlighter-rouge">SPARK_WORKER_INSTANCES</code> is deprecated in Standalone mode. It&#8217;s recommended to launch multiple executors in one worker and launch one worker per node instead of launching multiple workers per node and launching one executor per worker.</p>
</li>
</ul>
</div>
<!-- /container -->
</div>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/js/bootstrap.bundle.min.js"
integrity="sha384-MrcW6ZMFYlzcLA8Nl+NtUVF0sA7MsXsP1UyJoMp4YLEuNSfAP+JcXn/tWtIaxVXM"
crossorigin="anonymous"></script>
<script src="https://code.jquery.com/jquery.js"></script>
<script src="js/vendor/anchor.min.js"></script>
<script src="js/main.js"></script>
<script type="text/javascript" src="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.js"></script>
<script type="text/javascript">
// Algolia DocSearch wiring. DocSearch has two halves:
//   1. a crawler (run on Algolia's infrastructure every 24 hours) that follows
//      every link on the site, extracts the page content, and pushes it into
//      an Algolia index;
//   2. this snippet, which binds that index to the navbar search input and
//      renders matches in a dropdown UI.
var docsearchConfig = {
  appId: 'RAI69RXRSK',
  apiKey: 'd62f962a82bc9abb53471cb7b89da35e',
  indexName: 'apache_spark',
  inputSelector: '#docsearch-input',
  enhancedSearchInput: true,
  // Restrict results to the documentation version this page belongs to.
  algoliaOptions: {
    'facetFilters': ["version:3.5.0"]
  },
  debug: false // flip to true to inspect the dropdown while developing
};
docsearch(docsearchConfig);
</script>
<!-- MathJax Section -->
<script type="text/x-mathjax-config">
// Processed by MathJax itself (not executed by the browser): enable AMS-style
// automatic numbering for display equations.
MathJax.Hub.Config({
TeX: { equationNumbers: { autoNumber: "AMS" } }
});
</script>
<script>
// Inject the MathJax loader <script> dynamically, picking the URL scheme from
// the current page. This keeps the docs working when opened via file://, HTTP,
// or HTTPS alike; a protocol-relative "//cdn.mathjax..." URL would break the
// file:// case.
(function (doc) {
  var loader = doc.createElement('script');
  loader.type = 'text/javascript';
  loader.async = true;
  // Apply the inline-math configuration once the library has loaded.
  loader.onload = function () {
    MathJax.Hub.Config({
      tex2jax: {
        inlineMath: [ ["$", "$"], ["\\\\(","\\\\)"] ],
        displayMath: [ ["$$","$$"], ["\\[", "\\]"] ],
        processEscapes: true,
        skipTags: ['script', 'noscript', 'style', 'textarea', 'pre']
      }
    });
  };
  var scheme = ('https:' == doc.location.protocol) ? 'https://' : 'http://';
  loader.src = scheme +
    'cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js' +
    '?config=TeX-AMS-MML_HTMLorMML';
  doc.getElementsByTagName('head')[0].appendChild(loader);
}(document));
</script>
</body>
</html>