blob: 4a9b2abeae173d8c90e4f32c213d6d7b6438a13d [file] [log] [blame]
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_402) on Mon Apr 15 02:01:53 UTC 2024 -->
<title>Utils (Spark 3.4.3 JavaDoc)</title>
<meta name="date" content="2024-04-15">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
// Mirror this page's title onto the enclosing frameset window so the browser
// tab shows the class name. Accessing parent.document throws when the parent
// frame is cross-origin, so any error is deliberately swallowed.
// 'is-external=true' in the URL marks pages opened from an external javadoc
// bundle, which must not retitle the parent.
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Utils (Spark 3.4.3 JavaDoc)";
}
}
catch(err) {
}
//-->
// Data tables driving the Method Summary tab filter — presumably consumed by
// ../../../../script.js loaded in <head> (standard javadoc 8 machinery;
// confirm against script.js if modifying).
// "methods": table row id ("i0".."i167") -> bitmask of method kinds. Every
// value is 9 == 1|8, i.e. each method matches both the Static (mask 1) and
// Concrete (mask 8) tabs per the "tabs" map below.
var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":9,"i50":9,"i51":9,"i52":9,"i53":9,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":9,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":9,"i75":9,"i76":9,"i77":9,"i78":9,"i79":9,"i80":9,"i81":9,"i82":9,"i83":9,"i84":9,"i85":9,"i86":9,"i87":9,"i88":9,"i89":9,"i90":9,"i91":9,"i92":9,"i93":9,"i94":9,"i95":9,"i96":9,"i97":9,"i98":9,"i99":9,"i100":9,"i101":9,"i102":9,"i103":9,"i104":9,"i105":9,"i106":9,"i107":9,"i108":9,"i109":9,"i110":9,"i111":9,"i112":9,"i113":9,"i114":9,"i115":9,"i116":9,"i117":9,"i118":9,"i119":9,"i120":9,"i121":9,"i122":9,"i123":9,"i124":9,"i125":9,"i126":9,"i127":9,"i128":9,"i129":9,"i130":9,"i131":9,"i132":9,"i133":9,"i134":9,"i135":9,"i136":9,"i137":9,"i138":9,"i139":9,"i140":9,"i141":9,"i142":9,"i143":9,"i144":9,"i145":9,"i146":9,"i147":9,"i148":9,"i149":9,"i150":9,"i151":9,"i152":9,"i153":9,"i154":9,"i155":9,"i156":9,"i157":9,"i158":9,"i159":9,"i160":9,"i161":9,"i162":9,"i163":9,"i164":9,"i165":9,"i166":9,"i167":9};
// "tabs": bitmask -> [tab element id, tab label]; 65535 is the catch-all
// "All Methods" tab.
var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
// CSS class names used when re-striping table rows and switching which tab
// is rendered as active.
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/apache/spark/util/ThreadUtils.html" title="class in org.apache.spark.util"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
<li><a href="../../../../org/apache/spark/util/VersionUtils.html" title="class in org.apache.spark.util"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/apache/spark/util/Utils.html" target="_top">Frames</a></li>
<li><a href="Utils.html" target="_top">No&nbsp;Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
// Show the "All Classes" navbar link only when this page is the top-level
// window; inside the javadoc frameset the class list is already visible in
// its own frame, so the link is hidden there.
// NOTE(review): allClassesLink is an implicit global (no var) — this is the
// stock javadoc 8 generated output and is left byte-identical on purpose.
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary:&nbsp;</li>
<li>Nested&nbsp;|&nbsp;</li>
<li>Field&nbsp;|&nbsp;</li>
<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail:&nbsp;</li>
<li>Field&nbsp;|&nbsp;</li>
<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.apache.spark.util</div>
<h2 title="Class Utils" class="title">Class Utils</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>Object</li>
<li>
<ul class="inheritance">
<li>org.apache.spark.util.Utils</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<hr>
<br>
<pre>public class <span class="typeNameLabel">Utils</span>
extends Object</pre>
<div class="block">Various utility methods used by Spark.</div>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#Utils--">Utils</a></span>()</code>&nbsp;</td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t1" class="tableTab"><span><a href="javascript:show(1);">Static Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#BACKUP_STANDALONE_MASTER_PREFIX--">BACKUP_STANDALONE_MASTER_PREFIX</a></span>()</code>
<div class="block">An identifier that backup masters use in their responses.</div>
</td>
</tr>
<tr id="i1" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#buildLocationMetadata-scala.collection.Seq-int-">buildLocationMetadata</a></span>(scala.collection.Seq&lt;org.apache.hadoop.fs.Path&gt;&nbsp;paths,
int&nbsp;stopAppendingThreshold)</code>
<div class="block">Convert a sequence of <code>Path</code>s to a metadata string.</div>
</td>
</tr>
<tr id="i2" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#bytesToString-scala.math.BigInt-">bytesToString</a></span>(scala.math.BigInt&nbsp;size)</code>&nbsp;</td>
</tr>
<tr id="i3" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#bytesToString-long-">bytesToString</a></span>(long&nbsp;size)</code>
<div class="block">Convert a quantity in bytes to a human-readable string such as "4.0 MiB".</div>
</td>
</tr>
<tr id="i4" class="altColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#byteStringAsBytes-java.lang.String-">byteStringAsBytes</a></span>(String&nbsp;str)</code>
<div class="block">Convert a passed byte string (e.g.</div>
</td>
</tr>
<tr id="i5" class="rowColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#byteStringAsGb-java.lang.String-">byteStringAsGb</a></span>(String&nbsp;str)</code>
<div class="block">Convert a passed byte string (e.g.</div>
</td>
</tr>
<tr id="i6" class="altColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#byteStringAsKb-java.lang.String-">byteStringAsKb</a></span>(String&nbsp;str)</code>
<div class="block">Convert a passed byte string (e.g.</div>
</td>
</tr>
<tr id="i7" class="rowColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#byteStringAsMb-java.lang.String-">byteStringAsMb</a></span>(String&nbsp;str)</code>
<div class="block">Convert a passed byte string (e.g.</div>
</td>
</tr>
<tr id="i8" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#checkAndGetK8sMasterUrl-java.lang.String-">checkAndGetK8sMasterUrl</a></span>(String&nbsp;rawMasterURL)</code>
<div class="block">Check the validity of the given Kubernetes master URL and return the resolved URL.</div>
</td>
</tr>
<tr id="i9" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#checkHost-java.lang.String-">checkHost</a></span>(String&nbsp;host)</code>
<div class="block">Checks if the host contains only a valid hostname/IP without a port
NOTE: In case of an IPv6 IP it should be enclosed inside []</div>
</td>
</tr>
<tr id="i10" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#checkHostPort-java.lang.String-">checkHostPort</a></span>(String&nbsp;hostPort)</code>&nbsp;</td>
</tr>
<tr id="i11" class="rowColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#checkOffHeapEnabled-org.apache.spark.SparkConf-long-">checkOffHeapEnabled</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;sparkConf,
long&nbsp;offHeapSize)</code>
<div class="block">return 0 if MEMORY_OFFHEAP_ENABLED is false.</div>
</td>
</tr>
<tr id="i12" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#chmod700-java.io.File-">chmod700</a></span>(java.io.File&nbsp;file)</code>
<div class="block">JDK equivalent of <code>chmod 700 file</code>.</div>
</td>
</tr>
<tr id="i13" class="rowColor">
<td class="colFirst"><code>static &lt;C&gt;&nbsp;Class&lt;C&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#classForName-java.lang.String-boolean-boolean-">classForName</a></span>(String&nbsp;className,
boolean&nbsp;initialize,
boolean&nbsp;noSparkClassLoader)</code>
<div class="block">Preferred alternative to Class.forName(className), as well as
Class.forName(className, initialize, loader) with current thread's ContextClassLoader.</div>
</td>
</tr>
<tr id="i14" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#classIsLoadable-java.lang.String-">classIsLoadable</a></span>(String&nbsp;clazz)</code>
<div class="block">Determines whether the provided class is loadable in the current thread.</div>
</td>
</tr>
<tr id="i15" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#clone-T-org.apache.spark.serializer.SerializerInstance-scala.reflect.ClassTag-">clone</a></span>(T&nbsp;value,
<a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a>&nbsp;serializer,
scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$2)</code>
<div class="block">Clone an object using a Spark serializer.</div>
</td>
</tr>
<tr id="i16" class="altColor">
<td class="colFirst"><code>static java.util.Properties</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#cloneProperties-java.util.Properties-">cloneProperties</a></span>(java.util.Properties&nbsp;props)</code>
<div class="block">Create a new properties object with the same values as `props`</div>
</td>
</tr>
<tr id="i17" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#copyFileStreamNIO-java.nio.channels.FileChannel-java.nio.channels.WritableByteChannel-long-long-">copyFileStreamNIO</a></span>(java.nio.channels.FileChannel&nbsp;input,
java.nio.channels.WritableByteChannel&nbsp;output,
long&nbsp;startPosition,
long&nbsp;bytesToCopy)</code>&nbsp;</td>
</tr>
<tr id="i18" class="altColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#copyStream-java.io.InputStream-java.io.OutputStream-boolean-boolean-">copyStream</a></span>(java.io.InputStream&nbsp;in,
java.io.OutputStream&nbsp;out,
boolean&nbsp;closeStreams,
boolean&nbsp;transferToEnabled)</code>
<div class="block">Copy all data from an InputStream to an OutputStream.</div>
</td>
</tr>
<tr id="i19" class="rowColor">
<td class="colFirst"><code>static java.io.InputStream</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#copyStreamUpTo-java.io.InputStream-long-">copyStreamUpTo</a></span>(java.io.InputStream&nbsp;in,
long&nbsp;maxSize)</code>
<div class="block">Copy the first <code>maxSize</code> bytes of data from the InputStream to an in-memory
buffer, primarily to check for corruption.</div>
</td>
</tr>
<tr id="i20" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createDirectory-java.io.File-">createDirectory</a></span>(java.io.File&nbsp;dir)</code>
<div class="block">Create a directory given the abstract pathname</div>
</td>
</tr>
<tr id="i21" class="rowColor">
<td class="colFirst"><code>static java.io.File</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createDirectory-java.lang.String-java.lang.String-">createDirectory</a></span>(String&nbsp;root,
String&nbsp;namePrefix)</code>
<div class="block">Create a directory inside the given parent directory.</div>
</td>
</tr>
<tr id="i22" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createFailedToGetTokenMessage-java.lang.String-java.lang.Throwable-">createFailedToGetTokenMessage</a></span>(String&nbsp;serviceName,
Throwable&nbsp;e)</code>
<div class="block">Returns a string message about delegation token generation failure</div>
</td>
</tr>
<tr id="i23" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createSecret-org.apache.spark.SparkConf-">createSecret</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>&nbsp;</td>
</tr>
<tr id="i24" class="altColor">
<td class="colFirst"><code>static java.io.File</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createTempDir--">createTempDir</a></span>()</code>
<div class="block">Create a temporary directory inside the <code>java.io.tmpdir</code> prefixed with <code>spark</code>.</div>
</td>
</tr>
<tr id="i25" class="rowColor">
<td class="colFirst"><code>static java.io.File</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createTempDir-java.lang.String-java.lang.String-">createTempDir</a></span>(String&nbsp;root,
String&nbsp;namePrefix)</code>
<div class="block">Create a temporary directory inside the given parent directory.</div>
</td>
</tr>
<tr id="i26" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#decodeFileNameInURI-java.net.URI-">decodeFileNameInURI</a></span>(java.net.URI&nbsp;uri)</code>
<div class="block">Get the file name from uri's raw path and decode it.</div>
</td>
</tr>
<tr id="i27" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#DEFAULT_DRIVER_MEM_MB--">DEFAULT_DRIVER_MEM_MB</a></span>()</code>
<div class="block">Define a default value for driver memory here since this value is referenced across the code
base and nearly all files already use Utils.scala</div>
</td>
</tr>
<tr id="i28" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#deleteRecursively-java.io.File-">deleteRecursively</a></span>(java.io.File&nbsp;file)</code>
<div class="block">Delete a file or directory and its contents recursively.</div>
</td>
</tr>
<tr id="i29" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#deserialize-byte:A-">deserialize</a></span>(byte[]&nbsp;bytes)</code>
<div class="block">Deserialize an object using Java serialization</div>
</td>
</tr>
<tr id="i30" class="altColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#deserialize-byte:A-java.lang.ClassLoader-">deserialize</a></span>(byte[]&nbsp;bytes,
ClassLoader&nbsp;loader)</code>
<div class="block">Deserialize an object using Java serialization and the given ClassLoader</div>
</td>
</tr>
<tr id="i31" class="rowColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#deserializeLongValue-byte:A-">deserializeLongValue</a></span>(byte[]&nbsp;bytes)</code>
<div class="block">Deserialize a Long value (used for <code>org.apache.spark.api.python.PythonPartitioner</code>)</div>
</td>
</tr>
<tr id="i32" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#deserializeViaNestedStream-java.io.InputStream-org.apache.spark.serializer.SerializerInstance-scala.Function1-">deserializeViaNestedStream</a></span>(java.io.InputStream&nbsp;is,
<a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a>&nbsp;ser,
scala.Function1&lt;<a href="../../../../org/apache/spark/serializer/DeserializationStream.html" title="class in org.apache.spark.serializer">DeserializationStream</a>,scala.runtime.BoxedUnit&gt;&nbsp;f)</code>
<div class="block">Deserialize via nested stream using specific serializer</div>
</td>
</tr>
<tr id="i33" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#doesDirectoryContainAnyNewFiles-java.io.File-long-">doesDirectoryContainAnyNewFiles</a></span>(java.io.File&nbsp;dir,
long&nbsp;cutoff)</code>
<div class="block">Determines if a directory contains any files newer than cutoff seconds.</div>
</td>
</tr>
<tr id="i34" class="altColor">
<td class="colFirst"><code>static java.io.File</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#doFetchFile-java.lang.String-java.io.File-java.lang.String-org.apache.spark.SparkConf-org.apache.hadoop.conf.Configuration-">doFetchFile</a></span>(String&nbsp;url,
java.io.File&nbsp;targetDir,
String&nbsp;filename,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
org.apache.hadoop.conf.Configuration&nbsp;hadoopConf)</code>
<div class="block">Download a file or directory to target directory.</div>
</td>
</tr>
<tr id="i35" class="rowColor">
<td class="colFirst"><code>static scala.collection.immutable.Set&lt;String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#EMPTY_USER_GROUPS--">EMPTY_USER_GROUPS</a></span>()</code>&nbsp;</td>
</tr>
<tr id="i36" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#encodeFileNameToURIRawPath-java.lang.String-">encodeFileNameToURIRawPath</a></span>(String&nbsp;fileName)</code>
<div class="block">A file name may contain some invalid URI characters, such as " ".</div>
</td>
</tr>
<tr id="i37" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#exceptionString-java.lang.Throwable-">exceptionString</a></span>(Throwable&nbsp;e)</code>
<div class="block">Return a nice string representation of the exception.</div>
</td>
</tr>
<tr id="i38" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#executeAndGetOutput-scala.collection.Seq-java.io.File-scala.collection.Map-boolean-">executeAndGetOutput</a></span>(scala.collection.Seq&lt;String&gt;&nbsp;command,
java.io.File&nbsp;workingDir,
scala.collection.Map&lt;String,String&gt;&nbsp;extraEnvironment,
boolean&nbsp;redirectStderr)</code>
<div class="block">Execute a command and get its output, throwing an exception if it yields a code other than 0.</div>
</td>
</tr>
<tr id="i39" class="rowColor">
<td class="colFirst"><code>static Process</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#executeCommand-scala.collection.Seq-java.io.File-scala.collection.Map-boolean-">executeCommand</a></span>(scala.collection.Seq&lt;String&gt;&nbsp;command,
java.io.File&nbsp;workingDir,
scala.collection.Map&lt;String,String&gt;&nbsp;extraEnvironment,
boolean&nbsp;redirectStderr)</code>
<div class="block">Execute a command and return the process running the command.</div>
</td>
</tr>
<tr id="i40" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#executorOffHeapMemorySizeAsMb-org.apache.spark.SparkConf-">executorOffHeapMemorySizeAsMb</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;sparkConf)</code>
<div class="block">Convert MEMORY_OFFHEAP_SIZE to MB Unit, return 0 if MEMORY_OFFHEAP_ENABLED is false.</div>
</td>
</tr>
<tr id="i41" class="rowColor">
<td class="colFirst"><code>static scala.Tuple2&lt;String,Object&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#extractHostPortFromSparkUrl-java.lang.String-">extractHostPortFromSparkUrl</a></span>(String&nbsp;sparkUrl)</code>
<div class="block">Return a pair of host and port extracted from the <code>sparkUrl</code>.</div>
</td>
</tr>
<tr id="i42" class="altColor">
<td class="colFirst"><code>static java.io.File</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#fetchFile-java.lang.String-java.io.File-org.apache.spark.SparkConf-org.apache.hadoop.conf.Configuration-long-boolean-boolean-">fetchFile</a></span>(String&nbsp;url,
java.io.File&nbsp;targetDir,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
org.apache.hadoop.conf.Configuration&nbsp;hadoopConf,
long&nbsp;timestamp,
boolean&nbsp;useCache,
boolean&nbsp;shouldUntar)</code>
<div class="block">Download a file or directory to target directory.</div>
</td>
</tr>
<tr id="i43" class="rowColor">
<td class="colFirst"><code>static org.apache.spark.util.CallSite</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getCallSite-scala.Function1-">getCallSite</a></span>(scala.Function1&lt;String,Object&gt;&nbsp;skipClass)</code>
<div class="block">When called inside a class in the spark package, returns the name of the user code class
(outside the spark package) that called into Spark, as well as which Spark method they called.</div>
</td>
</tr>
<tr id="i44" class="altColor">
<td class="colFirst"><code>static String[]</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getConfiguredLocalDirs-org.apache.spark.SparkConf-">getConfiguredLocalDirs</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>
<div class="block">Return the configured local directories where Spark can write files.</div>
</td>
</tr>
<tr id="i45" class="rowColor">
<td class="colFirst"><code>static ClassLoader</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getContextOrSparkClassLoader--">getContextOrSparkClassLoader</a></span>()</code>
<div class="block">Get the Context ClassLoader on this thread or, if not present, the ClassLoader that
loaded Spark.</div>
</td>
</tr>
<tr id="i46" class="altColor">
<td class="colFirst"><code>static scala.collection.immutable.Set&lt;String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getCurrentUserGroups-org.apache.spark.SparkConf-java.lang.String-">getCurrentUserGroups</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;sparkConf,
String&nbsp;username)</code>&nbsp;</td>
</tr>
<tr id="i47" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getCurrentUserName--">getCurrentUserName</a></span>()</code>
<div class="block">Returns the current user name.</div>
</td>
</tr>
<tr id="i48" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getDefaultPropertiesFile-scala.collection.Map-">getDefaultPropertiesFile</a></span>(scala.collection.Map&lt;String,String&gt;&nbsp;env)</code>
<div class="block">Return the path of the default Spark properties file.</div>
</td>
</tr>
<tr id="i49" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getDynamicAllocationInitialExecutors-org.apache.spark.SparkConf-">getDynamicAllocationInitialExecutors</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>
<div class="block">Return the initial number of executors for dynamic allocation.</div>
</td>
</tr>
<tr id="i50" class="altColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getFileLength-java.io.File-org.apache.spark.SparkConf-">getFileLength</a></span>(java.io.File&nbsp;file,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;workConf)</code>
<div class="block">Return the file length, if the file is compressed it returns the uncompressed file length.</div>
</td>
</tr>
<tr id="i51" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getFormattedClassName-java.lang.Object-">getFormattedClassName</a></span>(Object&nbsp;obj)</code>
<div class="block">Return the class name of the given object, removing all dollar signs</div>
</td>
</tr>
<tr id="i52" class="altColor">
<td class="colFirst"><code>static org.apache.hadoop.fs.FileSystem</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getHadoopFileSystem-java.lang.String-org.apache.hadoop.conf.Configuration-">getHadoopFileSystem</a></span>(String&nbsp;path,
org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
<div class="block">Return a Hadoop FileSystem with the scheme encoded in the given path.</div>
</td>
</tr>
<tr id="i53" class="rowColor">
<td class="colFirst"><code>static org.apache.hadoop.fs.FileSystem</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getHadoopFileSystem-java.net.URI-org.apache.hadoop.conf.Configuration-">getHadoopFileSystem</a></span>(java.net.URI&nbsp;path,
org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
<div class="block">Return a Hadoop FileSystem with the scheme encoded in the given path.</div>
</td>
</tr>
<tr id="i54" class="altColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getIteratorSize-scala.collection.Iterator-">getIteratorSize</a></span>(scala.collection.Iterator&lt;?&gt;&nbsp;iterator)</code>
<div class="block">Counts the number of elements of an iterator.</div>
</td>
</tr>
<tr id="i55" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;scala.collection.Iterator&lt;scala.Tuple2&lt;T,Object&gt;&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getIteratorZipWithIndex-scala.collection.Iterator-long-">getIteratorZipWithIndex</a></span>(scala.collection.Iterator&lt;T&gt;&nbsp;iter,
long&nbsp;startIndex)</code>
<div class="block">Generate a zipWithIndex iterator, avoid index value overflowing problem
in scala's zipWithIndex</div>
</td>
</tr>
<tr id="i56" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getLocalDir-org.apache.spark.SparkConf-">getLocalDir</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>
<div class="block">Get the path of a temporary directory.</div>
</td>
</tr>
<tr id="i57" class="rowColor">
<td class="colFirst"><code>static scala.collection.Seq&lt;String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getLocalUserJarsForShell-org.apache.spark.SparkConf-">getLocalUserJarsForShell</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>
<div class="block">Return the local jar files which will be added to REPL's classpath.</div>
</td>
</tr>
<tr id="i58" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getProcessName--">getProcessName</a></span>()</code>
<div class="block">Returns the name of this JVM process.</div>
</td>
</tr>
<tr id="i59" class="rowColor">
<td class="colFirst"><code>static scala.collection.Map&lt;String,String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getPropertiesFromFile-java.lang.String-">getPropertiesFromFile</a></span>(String&nbsp;filename)</code>
<div class="block">Load properties present in the given file.</div>
</td>
</tr>
<tr id="i60" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getSimpleName-java.lang.Class-">getSimpleName</a></span>(Class&lt;?&gt;&nbsp;cls)</code>
<div class="block">Safer than Class obj's getSimpleName which may throw Malformed class name error in scala.</div>
</td>
</tr>
<tr id="i61" class="rowColor">
<td class="colFirst"><code>static ClassLoader</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getSparkClassLoader--">getSparkClassLoader</a></span>()</code>
<div class="block">Get the ClassLoader which loaded Spark.</div>
</td>
</tr>
<tr id="i62" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getSparkOrYarnConfig-org.apache.spark.SparkConf-java.lang.String-java.lang.String-">getSparkOrYarnConfig</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
String&nbsp;key,
String&nbsp;default_)</code>
<div class="block">Return the value of a config either through the SparkConf or the Hadoop configuration.</div>
</td>
</tr>
<tr id="i63" class="rowColor">
<td class="colFirst"><code>static scala.Option&lt;String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getStderr-java.lang.Process-long-">getStderr</a></span>(Process&nbsp;process,
long&nbsp;timeoutMs)</code>
<div class="block">Return the stderr of a process after waiting for the process to terminate.</div>
</td>
</tr>
<tr id="i64" class="altColor">
<td class="colFirst"><code>static scala.collection.Map&lt;String,String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getSystemProperties--">getSystemProperties</a></span>()</code>
<div class="block">Returns the system properties map that is thread-safe to iterate over.</div>
</td>
</tr>
<tr id="i65" class="rowColor">
<td class="colFirst"><code>static <a href="../../../../org/apache/spark/status/api/v1/ThreadStackTrace.html" title="class in org.apache.spark.status.api.v1">ThreadStackTrace</a>[]</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getThreadDump--">getThreadDump</a></span>()</code>
<div class="block">Return a thread dump of all threads' stacktraces.</div>
</td>
</tr>
<tr id="i66" class="altColor">
<td class="colFirst"><code>static scala.Option&lt;<a href="../../../../org/apache/spark/status/api/v1/ThreadStackTrace.html" title="class in org.apache.spark.status.api.v1">ThreadStackTrace</a>&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getThreadDumpForThread-long-">getThreadDumpForThread</a></span>(long&nbsp;threadId)</code>&nbsp;</td>
</tr>
<tr id="i67" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getUsedTimeNs-long-">getUsedTimeNs</a></span>(long&nbsp;startTimeNs)</code>
<div class="block">Return a string describing how much time has passed, in milliseconds, since the given start time.</div>
</td>
</tr>
<tr id="i68" class="altColor">
<td class="colFirst"><code>static scala.collection.Seq&lt;String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getUserJars-org.apache.spark.SparkConf-">getUserJars</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>
<div class="block">Return the jar files pointed by the "spark.jars" property.</div>
</td>
</tr>
<tr id="i69" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#initDaemon-org.slf4j.Logger-">initDaemon</a></span>(org.slf4j.Logger&nbsp;log)</code>
<div class="block">Utility function that should be called early in <code>main()</code> for daemons to set up some common
diagnostic state.</div>
</td>
</tr>
<tr id="i70" class="altColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#instantiateSerializerFromConf-org.apache.spark.internal.config.ConfigEntry-org.apache.spark.SparkConf-boolean-">instantiateSerializerFromConf</a></span>(org.apache.spark.internal.config.ConfigEntry&lt;String&gt;&nbsp;propertyName,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
boolean&nbsp;isDriver)</code>&nbsp;</td>
</tr>
<tr id="i71" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#instantiateSerializerOrShuffleManager-java.lang.String-org.apache.spark.SparkConf-boolean-">instantiateSerializerOrShuffleManager</a></span>(String&nbsp;className,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
boolean&nbsp;isDriver)</code>&nbsp;</td>
</tr>
<tr id="i72" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isAbsoluteURI-java.lang.String-">isAbsoluteURI</a></span>(String&nbsp;path)</code>
<div class="block">Check whether a path is an absolute URI.</div>
</td>
</tr>
<tr id="i73" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isBindCollision-java.lang.Throwable-">isBindCollision</a></span>(Throwable&nbsp;exception)</code>
<div class="block">Return whether the exception is caused by an address-port collision when binding.</div>
</td>
</tr>
<tr id="i74" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isClientMode-org.apache.spark.SparkConf-">isClientMode</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>&nbsp;</td>
</tr>
<tr id="i75" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isDynamicAllocationEnabled-org.apache.spark.SparkConf-">isDynamicAllocationEnabled</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>
<div class="block">Return whether dynamic allocation is enabled in the given conf.</div>
</td>
</tr>
<tr id="i76" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isFatalError-java.lang.Throwable-">isFatalError</a></span>(Throwable&nbsp;e)</code>
<div class="block">Returns true if the given exception was fatal.</div>
</td>
</tr>
<tr id="i77" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isFileSplittable-org.apache.hadoop.fs.Path-org.apache.hadoop.io.compress.CompressionCodecFactory-">isFileSplittable</a></span>(org.apache.hadoop.fs.Path&nbsp;path,
org.apache.hadoop.io.compress.CompressionCodecFactory&nbsp;codecFactory)</code>
<div class="block">Check whether the file of the path is splittable.</div>
</td>
</tr>
<tr id="i78" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isG1GC--">isG1GC</a></span>()</code>&nbsp;</td>
</tr>
<tr id="i79" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isInDirectory-java.io.File-java.io.File-">isInDirectory</a></span>(java.io.File&nbsp;parent,
java.io.File&nbsp;child)</code>
<div class="block">Return whether the specified file is a parent directory of the child file.</div>
</td>
</tr>
<tr id="i80" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isInRunningSparkTask--">isInRunningSparkTask</a></span>()</code>
<div class="block">Returns whether the current code is running in a Spark task, e.g., in executors.</div>
</td>
</tr>
<tr id="i81" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isLocalMaster-org.apache.spark.SparkConf-">isLocalMaster</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>&nbsp;</td>
</tr>
<tr id="i82" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isLocalUri-java.lang.String-">isLocalUri</a></span>(String&nbsp;uri)</code>
<div class="block">Returns whether the URI is a "local:" URI.</div>
</td>
</tr>
<tr id="i83" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isMac--">isMac</a></span>()</code>
<div class="block">Whether the underlying operating system is Mac OS X.</div>
</td>
</tr>
<tr id="i84" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isMacOnAppleSilicon--">isMacOnAppleSilicon</a></span>()</code>
<div class="block">Whether the underlying operating system is Mac OS X and processor is Apple Silicon.</div>
</td>
</tr>
<tr id="i85" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isMemberClass-java.lang.Class-">isMemberClass</a></span>(Class&lt;?&gt;&nbsp;cls)</code>
<div class="block">Returns true if and only if the underlying class is a member class.</div>
</td>
</tr>
<tr id="i86" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isPushBasedShuffleEnabled-org.apache.spark.SparkConf-boolean-boolean-">isPushBasedShuffleEnabled</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
boolean&nbsp;isDriver,
boolean&nbsp;checkSerializer)</code>
<div class="block">Push based shuffle can only be enabled when below conditions are met:
- the application is submitted to run in YARN mode
- external shuffle service enabled
- IO encryption disabled
- serializer(such as KryoSerializer) supports relocation of serialized objects</div>
</td>
</tr>
<tr id="i87" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isStreamingDynamicAllocationEnabled-org.apache.spark.SparkConf-">isStreamingDynamicAllocationEnabled</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>&nbsp;</td>
</tr>
<tr id="i88" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isTesting--">isTesting</a></span>()</code>
<div class="block">Indicates whether Spark is currently running unit tests.</div>
</td>
</tr>
<tr id="i89" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isWindows--">isWindows</a></span>()</code>
<div class="block">Whether the underlying operating system is Windows.</div>
</td>
</tr>
<tr id="i90" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#libraryPathEnvName--">libraryPathEnvName</a></span>()</code>
<div class="block">Return the current system LD_LIBRARY_PATH name</div>
</td>
</tr>
<tr id="i91" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#libraryPathEnvPrefix-scala.collection.Seq-">libraryPathEnvPrefix</a></span>(scala.collection.Seq&lt;String&gt;&nbsp;libraryPaths)</code>
<div class="block">Return the prefix of a command that appends the given library paths to the
system-specific library path environment variable.</div>
</td>
</tr>
<tr id="i92" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#loadDefaultSparkProperties-org.apache.spark.SparkConf-java.lang.String-">loadDefaultSparkProperties</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
String&nbsp;filePath)</code>
<div class="block">Load default Spark properties from the given file.</div>
</td>
</tr>
<tr id="i93" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;scala.collection.Seq&lt;T&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#loadExtensions-java.lang.Class-scala.collection.Seq-org.apache.spark.SparkConf-">loadExtensions</a></span>(Class&lt;T&gt;&nbsp;extClass,
scala.collection.Seq&lt;String&gt;&nbsp;classes,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>
<div class="block">Create instances of extension classes.</div>
</td>
</tr>
<tr id="i94" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#LOCAL_SCHEME--">LOCAL_SCHEME</a></span>()</code>
<div class="block">Scheme used for files that are locally available on worker nodes in the cluster.</div>
</td>
</tr>
<tr id="i95" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#localCanonicalHostName--">localCanonicalHostName</a></span>()</code>
<div class="block">Get the local machine's FQDN.</div>
</td>
</tr>
<tr id="i96" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#localHostName--">localHostName</a></span>()</code>
<div class="block">Get the local machine's hostname.</div>
</td>
</tr>
<tr id="i97" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#localHostNameForURI--">localHostNameForURI</a></span>()</code>
<div class="block">Get the local machine's URI.</div>
</td>
</tr>
<tr id="i98" class="altColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#logUncaughtExceptions-scala.Function0-">logUncaughtExceptions</a></span>(scala.Function0&lt;T&gt;&nbsp;f)</code>
<div class="block">Execute the given block, logging and re-throwing any uncaught exception.</div>
</td>
</tr>
<tr id="i99" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#MAX_DIR_CREATION_ATTEMPTS--">MAX_DIR_CREATION_ATTEMPTS</a></span>()</code>&nbsp;</td>
</tr>
<tr id="i100" class="altColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#median-long:A-boolean-">median</a></span>(long[]&nbsp;sizes,
boolean&nbsp;alreadySorted)</code>
<div class="block">Return the median value of a long array</div>
</td>
</tr>
<tr id="i101" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#megabytesToString-long-">megabytesToString</a></span>(long&nbsp;megabytes)</code>
<div class="block">Convert a quantity in megabytes to a human-readable string such as "4.0 MiB".</div>
</td>
</tr>
<tr id="i102" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#memoryStringToMb-java.lang.String-">memoryStringToMb</a></span>(String&nbsp;str)</code>
<div class="block">Convert a Java memory parameter passed to -Xmx (such as 300m or 1g) to a number of mebibytes.</div>
</td>
</tr>
<tr id="i103" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#msDurationToString-long-">msDurationToString</a></span>(long&nbsp;ms)</code>
<div class="block">Returns a human-readable string representing a duration such as "35ms"</div>
</td>
</tr>
<tr id="i104" class="altColor">
<td class="colFirst"><code>static String[]</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#nonLocalPaths-java.lang.String-boolean-">nonLocalPaths</a></span>(String&nbsp;paths,
boolean&nbsp;testWindows)</code>
<div class="block">Return all non-local paths from a comma-separated list of paths.</div>
</td>
</tr>
<tr id="i105" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#nonNegativeHash-java.lang.Object-">nonNegativeHash</a></span>(Object&nbsp;obj)</code>&nbsp;</td>
</tr>
<tr id="i106" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#nonNegativeMod-int-int-">nonNegativeMod</a></span>(int&nbsp;x,
int&nbsp;mod)</code>&nbsp;</td>
</tr>
<tr id="i107" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#offsetBytes-scala.collection.Seq-scala.collection.Seq-long-long-">offsetBytes</a></span>(scala.collection.Seq&lt;java.io.File&gt;&nbsp;files,
scala.collection.Seq&lt;Object&gt;&nbsp;fileLengths,
long&nbsp;start,
long&nbsp;end)</code>
<div class="block">Return a string containing data across a set of files.</div>
</td>
</tr>
<tr id="i108" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#offsetBytes-java.lang.String-long-long-long-">offsetBytes</a></span>(String&nbsp;path,
long&nbsp;length,
long&nbsp;start,
long&nbsp;end)</code>
<div class="block">Return a string containing part of a file from byte 'start' to 'end'.</div>
</td>
</tr>
<tr id="i109" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#org:Dapache:Dspark:Dinternal:DLogging:D:Dlog__:Deq-org.slf4j.Logger-">org$apache$spark$internal$Logging$$log__$eq</a></span>(org.slf4j.Logger&nbsp;x$1)</code>&nbsp;</td>
</tr>
<tr id="i110" class="altColor">
<td class="colFirst"><code>static org.slf4j.Logger</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#org:Dapache:Dspark:Dinternal:DLogging:D:Dlog_--">org$apache$spark$internal$Logging$$log_</a></span>()</code>&nbsp;</td>
</tr>
<tr id="i111" class="rowColor">
<td class="colFirst"><code>static scala.Tuple2&lt;String,Object&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#parseHostPort-java.lang.String-">parseHostPort</a></span>(String&nbsp;hostPort)</code>&nbsp;</td>
</tr>
<tr id="i112" class="altColor">
<td class="colFirst"><code>static String[]</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#parseStandaloneMasterUrls-java.lang.String-">parseStandaloneMasterUrls</a></span>(String&nbsp;masterUrls)</code>
<div class="block">Split the comma delimited string of master URLs into a list.</div>
</td>
</tr>
<tr id="i113" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#portMaxRetries-org.apache.spark.SparkConf-">portMaxRetries</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>
<div class="block">Maximum number of retries when binding to a port before giving up.</div>
</td>
</tr>
<tr id="i114" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#preferIPv6--">preferIPv6</a></span>()</code>
<div class="block">Whether the underlying JVM prefer IPv6 addresses.</div>
</td>
</tr>
<tr id="i115" class="rowColor">
<td class="colFirst"><code>static Thread</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#processStreamByLine-java.lang.String-java.io.InputStream-scala.Function1-">processStreamByLine</a></span>(String&nbsp;threadName,
java.io.InputStream&nbsp;inputStream,
scala.Function1&lt;String,scala.runtime.BoxedUnit&gt;&nbsp;processLine)</code>
<div class="block">Return and start a daemon thread that processes the content of the input stream line by line.</div>
</td>
</tr>
<tr id="i116" class="altColor">
<td class="colFirst"><code>static java.util.Random</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#random--">random</a></span>()</code>&nbsp;</td>
</tr>
<tr id="i117" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;scala.collection.Seq&lt;T&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#randomize-scala.collection.TraversableOnce-scala.reflect.ClassTag-">randomize</a></span>(scala.collection.TraversableOnce&lt;T&gt;&nbsp;seq,
scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$1)</code>
<div class="block">Shuffle the elements of a collection into a random order, returning the
result in a new collection.</div>
</td>
</tr>
<tr id="i118" class="altColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;Object</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#randomizeInPlace-java.lang.Object-java.util.Random-">randomizeInPlace</a></span>(Object&nbsp;arr,
java.util.Random&nbsp;rand)</code>
<div class="block">Shuffle the elements of an array into a random order, modifying the
original array.</div>
</td>
</tr>
<tr id="i119" class="rowColor">
<td class="colFirst"><code>static java.io.File[]</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#recursiveList-java.io.File-">recursiveList</a></span>(java.io.File&nbsp;f)</code>
<div class="block">Lists files recursively.</div>
</td>
</tr>
<tr id="i120" class="altColor">
<td class="colFirst"><code>static scala.collection.Seq&lt;scala.Tuple2&lt;String,String&gt;&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#redact-scala.collection.Map-">redact</a></span>(scala.collection.Map&lt;String,String&gt;&nbsp;kvs)</code>
<div class="block">Looks up the redaction regex from within the key value pairs and uses it to redact the rest
of the key value pairs.</div>
</td>
</tr>
<tr id="i121" class="rowColor">
<td class="colFirst"><code>static &lt;K,V&gt;&nbsp;scala.collection.Seq&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#redact-scala.Option-scala.collection.Seq-">redact</a></span>(scala.Option&lt;scala.util.matching.Regex&gt;&nbsp;regex,
scala.collection.Seq&lt;scala.Tuple2&lt;K,V&gt;&gt;&nbsp;kvs)</code>
<div class="block">Redact the sensitive values in the given map.</div>
</td>
</tr>
<tr id="i122" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#redact-scala.Option-java.lang.String-">redact</a></span>(scala.Option&lt;scala.util.matching.Regex&gt;&nbsp;regex,
String&nbsp;text)</code>
<div class="block">Redact the sensitive information in the given string.</div>
</td>
</tr>
<tr id="i123" class="rowColor">
<td class="colFirst"><code>static scala.collection.Seq&lt;scala.Tuple2&lt;String,String&gt;&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#redact-org.apache.spark.SparkConf-scala.collection.Seq-">redact</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
scala.collection.Seq&lt;scala.Tuple2&lt;String,String&gt;&gt;&nbsp;kvs)</code>
<div class="block">Redact the sensitive values in the given map.</div>
</td>
</tr>
<tr id="i124" class="altColor">
<td class="colFirst"><code>static scala.collection.Seq&lt;String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#redactCommandLineArgs-org.apache.spark.SparkConf-scala.collection.Seq-">redactCommandLineArgs</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
scala.collection.Seq&lt;String&gt;&nbsp;commands)</code>&nbsp;</td>
</tr>
<tr id="i125" class="rowColor">
<td class="colFirst"><code>static java.net.URI</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#resolveURI-java.lang.String-">resolveURI</a></span>(String&nbsp;path)</code>
<div class="block">Return a well-formed URI for the file described by a user input string.</div>
</td>
</tr>
<tr id="i126" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#resolveURIs-java.lang.String-">resolveURIs</a></span>(String&nbsp;paths)</code>
<div class="block">Resolve a comma-separated list of paths.</div>
</td>
</tr>
<tr id="i127" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#responseFromBackup-java.lang.String-">responseFromBackup</a></span>(String&nbsp;msg)</code>
<div class="block">Return true if the response message is sent from a backup Master on standby.</div>
</td>
</tr>
<tr id="i128" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#sanitizeDirName-java.lang.String-">sanitizeDirName</a></span>(String&nbsp;str)</code>&nbsp;</td>
</tr>
<tr id="i129" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;byte[]</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#serialize-T-">serialize</a></span>(T&nbsp;o)</code>
<div class="block">Serialize an object using Java serialization</div>
</td>
</tr>
<tr id="i130" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#serializeViaNestedStream-java.io.OutputStream-org.apache.spark.serializer.SerializerInstance-scala.Function1-">serializeViaNestedStream</a></span>(java.io.OutputStream&nbsp;os,
<a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a>&nbsp;ser,
scala.Function1&lt;<a href="../../../../org/apache/spark/serializer/SerializationStream.html" title="class in org.apache.spark.serializer">SerializationStream</a>,scala.runtime.BoxedUnit&gt;&nbsp;f)</code>
<div class="block">Serialize via nested stream using specific serializer</div>
</td>
</tr>
<tr id="i131" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#setCustomHostname-java.lang.String-">setCustomHostname</a></span>(String&nbsp;hostname)</code>
<div class="block">Allow setting a custom host name because when we run on Mesos we need to use the same
hostname it reports to the master.</div>
</td>
</tr>
<tr id="i132" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#setLogLevel-org.apache.logging.log4j.Level-">setLogLevel</a></span>(org.apache.logging.log4j.Level&nbsp;l)</code>
<div class="block">Configure a new log4j level.</div>
</td>
</tr>
<tr id="i133" class="rowColor">
<td class="colFirst"><code>static scala.collection.Seq&lt;String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#sparkJavaOpts-org.apache.spark.SparkConf-scala.Function1-">sparkJavaOpts</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
scala.Function1&lt;String,Object&gt;&nbsp;filterKey)</code>
<div class="block">Convert all spark properties set in the given SparkConf to a sequence of java options.</div>
</td>
</tr>
<tr id="i134" class="altColor">
<td class="colFirst"><code>static scala.collection.Seq&lt;String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#splitCommandString-java.lang.String-">splitCommandString</a></span>(String&nbsp;s)</code>
<div class="block">Split a string of potentially quoted arguments from the command line the way that a shell
would do it to determine arguments to a command.</div>
</td>
</tr>
<tr id="i135" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;scala.Tuple2&lt;T,Object&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#startServiceOnPort-int-scala.Function1-org.apache.spark.SparkConf-java.lang.String-">startServiceOnPort</a></span>(int&nbsp;startPort,
scala.Function1&lt;Object,scala.Tuple2&lt;T,Object&gt;&gt;&nbsp;startService,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
String&nbsp;serviceName)</code>
<div class="block">Attempt to start a service on the given port, or fail after a number of attempts.</div>
</td>
</tr>
<tr id="i136" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#stringHalfWidth-java.lang.String-">stringHalfWidth</a></span>(String&nbsp;str)</code>
<div class="block">Return the number of half widths in a given string.</div>
</td>
</tr>
<tr id="i137" class="rowColor">
<td class="colFirst"><code>static scala.collection.Seq&lt;String&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#stringToSeq-java.lang.String-">stringToSeq</a></span>(String&nbsp;str)</code>&nbsp;</td>
</tr>
<tr id="i138" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#stripDollars-java.lang.String-">stripDollars</a></span>(String&nbsp;s)</code>
<div class="block">Remove trailing dollar signs from qualified class name,
and return the trailing part after the last dollar sign in the middle</div>
</td>
</tr>
<tr id="i139" class="rowColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#substituteAppId-java.lang.String-java.lang.String-">substituteAppId</a></span>(String&nbsp;opt,
String&nbsp;appId)</code>
<div class="block">Replaces all the {{APP_ID}} occurrences with the App Id.</div>
</td>
</tr>
<tr id="i140" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#substituteAppNExecIds-java.lang.String-java.lang.String-java.lang.String-">substituteAppNExecIds</a></span>(String&nbsp;opt,
String&nbsp;appId,
String&nbsp;execId)</code>
<div class="block">Replaces all the {{EXECUTOR_ID}} occurrences with the Executor Id
and {{APP_ID}} occurrences with the App Id.</div>
</td>
</tr>
<tr id="i141" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#symlink-java.io.File-java.io.File-">symlink</a></span>(java.io.File&nbsp;src,
java.io.File&nbsp;dst)</code>
<div class="block">Creates a symlink.</div>
</td>
</tr>
<tr id="i142" class="altColor">
<td class="colFirst"><code>static java.io.File</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tempFileWith-java.io.File-">tempFileWith</a></span>(java.io.File&nbsp;path)</code>
<div class="block">Returns a path of temporary file which is in the same directory with <code>path</code>.</div>
</td>
</tr>
<tr id="i143" class="rowColor">
<td class="colFirst"><code>static scala.Option&lt;Object&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#terminateProcess-java.lang.Process-long-">terminateProcess</a></span>(Process&nbsp;process,
long&nbsp;timeoutMs)</code>
<div class="block">Terminates a process waiting for at most the specified duration.</div>
</td>
</tr>
<tr id="i144" class="altColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#timeIt-int-scala.Function0-scala.Option-">timeIt</a></span>(int&nbsp;numIters,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;f,
scala.Option&lt;scala.Function0&lt;scala.runtime.BoxedUnit&gt;&gt;&nbsp;prepare)</code>
<div class="block">Timing method based on iterations that permit JVM JIT optimization.</div>
</td>
</tr>
<tr id="i145" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#times-int-scala.Function0-">times</a></span>(int&nbsp;numIters,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;f)</code>
<div class="block">Method executed for repeating a task for side effects.</div>
</td>
</tr>
<tr id="i146" class="altColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#timeStringAsMs-java.lang.String-">timeStringAsMs</a></span>(String&nbsp;str)</code>
<div class="block">Convert a time parameter such as (50s, 100ms, or 250us) to milliseconds for internal use.</div>
</td>
</tr>
<tr id="i147" class="rowColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#timeStringAsSeconds-java.lang.String-">timeStringAsSeconds</a></span>(String&nbsp;str)</code>
<div class="block">Convert a time parameter such as (50s, 100ms, or 250us) to seconds for internal use.</div>
</td>
</tr>
<tr id="i148" class="altColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;scala.Tuple2&lt;T,Object&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#timeTakenMs-scala.Function0-">timeTakenMs</a></span>(scala.Function0&lt;T&gt;&nbsp;body)</code>
<div class="block">Records the duration of running `body`.</div>
</td>
</tr>
<tr id="i149" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;scala.util.Try&lt;T&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryLog-scala.Function0-">tryLog</a></span>(scala.Function0&lt;T&gt;&nbsp;f)</code>
<div class="block">Executes the given block in a Try, logging any uncaught exceptions.</div>
</td>
</tr>
<tr id="i150" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryLogNonFatalError-scala.Function0-">tryLogNonFatalError</a></span>(scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;block)</code>
<div class="block">Executes the given block.</div>
</td>
</tr>
<tr id="i151" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryOrExit-scala.Function0-">tryOrExit</a></span>(scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;block)</code>
<div class="block">Execute a block of code that evaluates to Unit, forwarding any uncaught exceptions to the
default UncaughtExceptionHandler</div>
</td>
</tr>
<tr id="i152" class="altColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryOrIOException-scala.Function0-">tryOrIOException</a></span>(scala.Function0&lt;T&gt;&nbsp;block)</code>
<div class="block">Execute a block of code that returns a value, re-throwing any non-fatal uncaught
exceptions as IOException.</div>
</td>
</tr>
<tr id="i153" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryOrStopSparkContext-org.apache.spark.SparkContext-scala.Function0-">tryOrStopSparkContext</a></span>(<a href="../../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a>&nbsp;sc,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;block)</code>
<div class="block">Execute a block of code that evaluates to Unit, stop SparkContext if there is any uncaught
exception</div>
</td>
</tr>
<tr id="i154" class="altColor">
<td class="colFirst"><code>static &lt;R extends java.io.Closeable,T&gt;<br>T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryWithResource-scala.Function0-scala.Function1-">tryWithResource</a></span>(scala.Function0&lt;R&gt;&nbsp;createResource,
scala.Function1&lt;R,T&gt;&nbsp;f)</code>&nbsp;</td>
</tr>
<tr id="i155" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryWithSafeFinally-scala.Function0-scala.Function0-">tryWithSafeFinally</a></span>(scala.Function0&lt;T&gt;&nbsp;block,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;finallyBlock)</code>
<div class="block">Execute a block of code, then a finally block, but if exceptions happen in
the finally block, do not suppress the original exception.</div>
</td>
</tr>
<tr id="i156" class="altColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryWithSafeFinallyAndFailureCallbacks-scala.Function0-scala.Function0-scala.Function0-">tryWithSafeFinallyAndFailureCallbacks</a></span>(scala.Function0&lt;T&gt;&nbsp;block,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;catchBlock,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;finallyBlock)</code>
<div class="block">Execute a block of code and call the failure callbacks in the catch block.</div>
</td>
</tr>
<tr id="i157" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#unpack-java.io.File-java.io.File-">unpack</a></span>(java.io.File&nbsp;source,
java.io.File&nbsp;dest)</code>
<div class="block">Unpacks an archive file into the specified directory.</div>
</td>
</tr>
<tr id="i158" class="altColor">
<td class="colFirst"><code>static scala.collection.Seq&lt;java.io.File&gt;</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#unzipFilesFromFile-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.io.File-">unzipFilesFromFile</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
org.apache.hadoop.fs.Path&nbsp;dfsZipFile,
java.io.File&nbsp;localDir)</code>
<div class="block">Decompress a zip file into a local dir.</div>
</td>
</tr>
<tr id="i159" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#updateSparkConfigFromProperties-org.apache.spark.SparkConf-scala.collection.Map-">updateSparkConfigFromProperties</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
scala.collection.Map&lt;String,String&gt;&nbsp;properties)</code>
<div class="block">Updates Spark config with properties from a set of Properties.</div>
</td>
</tr>
<tr id="i160" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#userPort-int-int-">userPort</a></span>(int&nbsp;base,
int&nbsp;offset)</code>
<div class="block">Returns the user port to try when trying to bind a service.</div>
</td>
</tr>
<tr id="i161" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#validateURL-java.net.URI-">validateURL</a></span>(java.net.URI&nbsp;uri)</code>
<div class="block">Validate that a given URI is actually a valid URL as well.</div>
</td>
</tr>
<tr id="i162" class="altColor">
<td class="colFirst"><code>static String</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#weakIntern-java.lang.String-">weakIntern</a></span>(String&nbsp;s)</code>
<div class="block">String interning to reduce the memory usage.</div>
</td>
</tr>
<tr id="i163" class="rowColor">
<td class="colFirst"><code>static scala.util.matching.Regex</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#windowsDrive--">windowsDrive</a></span>()</code>
<div class="block">Pattern for matching a Windows drive, which contains only a single alphabet character.</div>
</td>
</tr>
<tr id="i164" class="altColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#withContextClassLoader-java.lang.ClassLoader-scala.Function0-">withContextClassLoader</a></span>(ClassLoader&nbsp;ctxClassLoader,
scala.Function0&lt;T&gt;&nbsp;fn)</code>
<div class="block">Run a segment of code using a different context class loader in the current thread</div>
</td>
</tr>
<tr id="i165" class="rowColor">
<td class="colFirst"><code>static &lt;T&gt;&nbsp;T</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#withDummyCallSite-org.apache.spark.SparkContext-scala.Function0-">withDummyCallSite</a></span>(<a href="../../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a>&nbsp;sc,
scala.Function0&lt;T&gt;&nbsp;body)</code>
<div class="block">To avoid calling <code>Utils.getCallSite</code> for every single RDD we create in the body,
set a dummy call site that RDDs use instead.</div>
</td>
</tr>
<tr id="i166" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#writeByteBuffer-java.nio.ByteBuffer-java.io.DataOutput-">writeByteBuffer</a></span>(java.nio.ByteBuffer&nbsp;bb,
java.io.DataOutput&nbsp;out)</code>
<div class="block">Primitive often used when writing <code>ByteBuffer</code> to <code>DataOutput</code></div>
</td>
</tr>
<tr id="i167" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#writeByteBuffer-java.nio.ByteBuffer-java.io.OutputStream-">writeByteBuffer</a></span>(java.nio.ByteBuffer&nbsp;bb,
java.io.OutputStream&nbsp;out)</code>
<div class="block">Primitive often used when writing <code>ByteBuffer</code> to <code>OutputStream</code></div>
</td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.Object">
<!-- -->
</a>
<h3>Methods inherited from class&nbsp;Object</h3>
<code>equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="Utils--">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>Utils</h4>
<pre>public&nbsp;Utils()</pre>
</li>
</ul>
</li>
</ul>
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="random--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>random</h4>
<pre>public static&nbsp;java.util.Random&nbsp;random()</pre>
</li>
</ul>
<a name="DEFAULT_DRIVER_MEM_MB--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>DEFAULT_DRIVER_MEM_MB</h4>
<pre>public static&nbsp;int&nbsp;DEFAULT_DRIVER_MEM_MB()</pre>
<div class="block">Define a default value for driver memory here since this value is referenced across the code
base and nearly all files already use Utils.scala</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="MAX_DIR_CREATION_ATTEMPTS--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>MAX_DIR_CREATION_ATTEMPTS</h4>
<pre>public static&nbsp;int&nbsp;MAX_DIR_CREATION_ATTEMPTS()</pre>
</li>
</ul>
<a name="LOCAL_SCHEME--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>LOCAL_SCHEME</h4>
<pre>public static&nbsp;String&nbsp;LOCAL_SCHEME()</pre>
<div class="block">Scheme used for files that are locally available on worker nodes in the cluster.</div>
</li>
</ul>
<a name="serialize-java.lang.Object-">
<!-- -->
</a><a name="serialize-T-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>serialize</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;byte[]&nbsp;serialize(T&nbsp;o)</pre>
<div class="block">Serialize an object using Java serialization</div>
</li>
</ul>
<a name="deserialize-byte:A-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>deserialize</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;deserialize(byte[]&nbsp;bytes)</pre>
<div class="block">Deserialize an object using Java serialization</div>
</li>
</ul>
<a name="deserialize-byte:A-java.lang.ClassLoader-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>deserialize</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;deserialize(byte[]&nbsp;bytes,
ClassLoader&nbsp;loader)</pre>
<div class="block">Deserialize an object using Java serialization and the given ClassLoader</div>
</li>
</ul>
<a name="deserializeLongValue-byte:A-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>deserializeLongValue</h4>
<pre>public static&nbsp;long&nbsp;deserializeLongValue(byte[]&nbsp;bytes)</pre>
<div class="block">Deserialize a Long value (used for <code>org.apache.spark.api.python.PythonPartitioner</code>)</div>
</li>
</ul>
<a name="serializeViaNestedStream-java.io.OutputStream-org.apache.spark.serializer.SerializerInstance-scala.Function1-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>serializeViaNestedStream</h4>
<pre>public static&nbsp;void&nbsp;serializeViaNestedStream(java.io.OutputStream&nbsp;os,
<a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a>&nbsp;ser,
scala.Function1&lt;<a href="../../../../org/apache/spark/serializer/SerializationStream.html" title="class in org.apache.spark.serializer">SerializationStream</a>,scala.runtime.BoxedUnit&gt;&nbsp;f)</pre>
<div class="block">Serialize via nested stream using specific serializer</div>
</li>
</ul>
<a name="deserializeViaNestedStream-java.io.InputStream-org.apache.spark.serializer.SerializerInstance-scala.Function1-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>deserializeViaNestedStream</h4>
<pre>public static&nbsp;void&nbsp;deserializeViaNestedStream(java.io.InputStream&nbsp;is,
<a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a>&nbsp;ser,
scala.Function1&lt;<a href="../../../../org/apache/spark/serializer/DeserializationStream.html" title="class in org.apache.spark.serializer">DeserializationStream</a>,scala.runtime.BoxedUnit&gt;&nbsp;f)</pre>
<div class="block">Deserialize via nested stream using specific serializer</div>
</li>
</ul>
<a name="weakIntern-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>weakIntern</h4>
<pre>public static&nbsp;String&nbsp;weakIntern(String&nbsp;s)</pre>
<div class="block">String interning to reduce the memory usage.</div>
</li>
</ul>
<a name="getSparkClassLoader--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getSparkClassLoader</h4>
<pre>public static&nbsp;ClassLoader&nbsp;getSparkClassLoader()</pre>
<div class="block">Get the ClassLoader which loaded Spark.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getContextOrSparkClassLoader--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getContextOrSparkClassLoader</h4>
<pre>public static&nbsp;ClassLoader&nbsp;getContextOrSparkClassLoader()</pre>
<div class="block">Get the Context ClassLoader on this thread or, if not present, the ClassLoader that
loaded Spark.
<p>
This should be used whenever passing a ClassLoader to Class.forName or finding the currently
active loader when setting up ClassLoader delegation chains.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="classIsLoadable-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>classIsLoadable</h4>
<pre>public static&nbsp;boolean&nbsp;classIsLoadable(String&nbsp;clazz)</pre>
<div class="block">Determines whether the provided class is loadable in the current thread.</div>
</li>
</ul>
<a name="classForName-java.lang.String-boolean-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>classForName</h4>
<pre>public static&nbsp;&lt;C&gt;&nbsp;Class&lt;C&gt;&nbsp;classForName(String&nbsp;className,
boolean&nbsp;initialize,
boolean&nbsp;noSparkClassLoader)</pre>
<div class="block">Preferred alternative to Class.forName(className), as well as
Class.forName(className, initialize, loader) with current thread's ContextClassLoader.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>className</code> - (undocumented)</dd>
<dd><code>initialize</code> - (undocumented)</dd>
<dd><code>noSparkClassLoader</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="withContextClassLoader-java.lang.ClassLoader-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>withContextClassLoader</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;withContextClassLoader(ClassLoader&nbsp;ctxClassLoader,
scala.Function0&lt;T&gt;&nbsp;fn)</pre>
<div class="block">Run a segment of code using a different context class loader in the current thread</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>ctxClassLoader</code> - (undocumented)</dd>
<dd><code>fn</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="writeByteBuffer-java.nio.ByteBuffer-java.io.DataOutput-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>writeByteBuffer</h4>
<pre>public static&nbsp;void&nbsp;writeByteBuffer(java.nio.ByteBuffer&nbsp;bb,
java.io.DataOutput&nbsp;out)</pre>
<div class="block">Primitive often used when writing <code>ByteBuffer</code> to <code>DataOutput</code></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>bb</code> - (undocumented)</dd>
<dd><code>out</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="writeByteBuffer-java.nio.ByteBuffer-java.io.OutputStream-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>writeByteBuffer</h4>
<pre>public static&nbsp;void&nbsp;writeByteBuffer(java.nio.ByteBuffer&nbsp;bb,
java.io.OutputStream&nbsp;out)</pre>
<div class="block">Primitive often used when writing <code>ByteBuffer</code> to <code>OutputStream</code></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>bb</code> - (undocumented)</dd>
<dd><code>out</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="chmod700-java.io.File-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>chmod700</h4>
<pre>public static&nbsp;boolean&nbsp;chmod700(java.io.File&nbsp;file)</pre>
<div class="block">JDK equivalent of <code>chmod 700 file</code>.
<p></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>file</code> - the file whose permissions will be modified</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>true if the permissions were successfully changed, false otherwise.</dd>
</dl>
</li>
</ul>
<a name="createDirectory-java.io.File-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>createDirectory</h4>
<pre>public static&nbsp;boolean&nbsp;createDirectory(java.io.File&nbsp;dir)</pre>
<div class="block">Create a directory given the abstract pathname</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>dir</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>true, if the directory is successfully created; otherwise, return false.</dd>
</dl>
</li>
</ul>
<a name="createDirectory-java.lang.String-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>createDirectory</h4>
<pre>public static&nbsp;java.io.File&nbsp;createDirectory(String&nbsp;root,
String&nbsp;namePrefix)</pre>
<div class="block">Create a directory inside the given parent directory. The directory is guaranteed to be
newly created, and is not marked for automatic deletion.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>root</code> - (undocumented)</dd>
<dd><code>namePrefix</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="createTempDir--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>createTempDir</h4>
<pre>public static&nbsp;java.io.File&nbsp;createTempDir()</pre>
<div class="block">Create a temporary directory inside the <code>java.io.tmpdir</code> prefixed with <code>spark</code>.
The directory will be automatically deleted when the VM shuts down.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="createTempDir-java.lang.String-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>createTempDir</h4>
<pre>public static&nbsp;java.io.File&nbsp;createTempDir(String&nbsp;root,
String&nbsp;namePrefix)</pre>
<div class="block">Create a temporary directory inside the given parent directory. The directory will be
automatically deleted when the VM shuts down.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>root</code> - (undocumented)</dd>
<dd><code>namePrefix</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="copyStream-java.io.InputStream-java.io.OutputStream-boolean-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>copyStream</h4>
<pre>public static&nbsp;long&nbsp;copyStream(java.io.InputStream&nbsp;in,
java.io.OutputStream&nbsp;out,
boolean&nbsp;closeStreams,
boolean&nbsp;transferToEnabled)</pre>
<div class="block">Copy all data from an InputStream to an OutputStream. NIO way of file stream to file stream
copying is disabled by default unless explicitly set transferToEnabled as true,
the parameter transferToEnabled should be configured by spark.file.transferTo = [true|false].</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>in</code> - (undocumented)</dd>
<dd><code>out</code> - (undocumented)</dd>
<dd><code>closeStreams</code> - (undocumented)</dd>
<dd><code>transferToEnabled</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="copyStreamUpTo-java.io.InputStream-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>copyStreamUpTo</h4>
<pre>public static&nbsp;java.io.InputStream&nbsp;copyStreamUpTo(java.io.InputStream&nbsp;in,
long&nbsp;maxSize)</pre>
<div class="block">Copy the first <code>maxSize</code> bytes of data from the InputStream to an in-memory
buffer, primarily to check for corruption.
<p>
This returns a new InputStream which contains the same data as the original input stream.
It may be entirely an in-memory buffer, or it may be a combination of in-memory data, and then
continue to read from the original stream. The only real use of this is if the original input
stream will potentially detect corruption while the data is being read (e.g. from compression).
This allows for an eager check of corruption in the first maxSize bytes of data.
<p></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>in</code> - (undocumented)</dd>
<dd><code>maxSize</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>An InputStream which includes all data from the original stream (combining buffered
data and remaining data in the original stream)</dd>
</dl>
</li>
</ul>
<a name="copyFileStreamNIO-java.nio.channels.FileChannel-java.nio.channels.WritableByteChannel-long-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>copyFileStreamNIO</h4>
<pre>public static&nbsp;void&nbsp;copyFileStreamNIO(java.nio.channels.FileChannel&nbsp;input,
java.nio.channels.WritableByteChannel&nbsp;output,
long&nbsp;startPosition,
long&nbsp;bytesToCopy)</pre>
</li>
</ul>
<a name="encodeFileNameToURIRawPath-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>encodeFileNameToURIRawPath</h4>
<pre>public static&nbsp;String&nbsp;encodeFileNameToURIRawPath(String&nbsp;fileName)</pre>
<div class="block">A file name may contain some invalid URI characters, such as " ". This method will convert the
file name to a raw path accepted by <code>java.net.URI(String)</code>.
<p>
Note: the file name must not contain "/" or "\"</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>fileName</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="decodeFileNameInURI-java.net.URI-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>decodeFileNameInURI</h4>
<pre>public static&nbsp;String&nbsp;decodeFileNameInURI(java.net.URI&nbsp;uri)</pre>
<div class="block">Get the file name from uri's raw path and decode it. If the raw path of uri ends with "/",
return the name before the last "/".</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>uri</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="fetchFile-java.lang.String-java.io.File-org.apache.spark.SparkConf-org.apache.hadoop.conf.Configuration-long-boolean-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>fetchFile</h4>
<pre>public static&nbsp;java.io.File&nbsp;fetchFile(String&nbsp;url,
java.io.File&nbsp;targetDir,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
org.apache.hadoop.conf.Configuration&nbsp;hadoopConf,
long&nbsp;timestamp,
boolean&nbsp;useCache,
boolean&nbsp;shouldUntar)</pre>
<div class="block">Download a file or directory to target directory. Supports fetching the file in a variety of
ways, including HTTP, Hadoop-compatible filesystems, and files on a standard filesystem, based
on the URL parameter. Fetching directories is only supported from Hadoop-compatible
filesystems.
<p>
If <code>useCache</code> is true, first attempts to fetch the file to a local cache that's shared
across executors running the same application. <code>useCache</code> is used mainly for
the executors, and not in local mode.
<p>
Throws SparkException if the target file already exists and has different contents than
the requested file.
<p>
If <code>shouldUntar</code> is true, it untars the given url if it is a tar.gz or tgz into <code>targetDir</code>.
This is a legacy behavior, and users should better use <code>spark.archives</code> configuration or
<code>SparkContext.addArchive</code></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>url</code> - (undocumented)</dd>
<dd><code>targetDir</code> - (undocumented)</dd>
<dd><code>conf</code> - (undocumented)</dd>
<dd><code>hadoopConf</code> - (undocumented)</dd>
<dd><code>timestamp</code> - (undocumented)</dd>
<dd><code>useCache</code> - (undocumented)</dd>
<dd><code>shouldUntar</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="unpack-java.io.File-java.io.File-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>unpack</h4>
<pre>public static&nbsp;void&nbsp;unpack(java.io.File&nbsp;source,
java.io.File&nbsp;dest)</pre>
<div class="block">Unpacks an archive file into the specified directory. It expects .jar, .zip, .tar.gz, .tgz
and .tar files. This behaves same as Hadoop's archive in distributed cache. This method is
basically copied from <code>org.apache.hadoop.yarn.util.FSDownload.unpack</code>.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>source</code> - (undocumented)</dd>
<dd><code>dest</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="timeTakenMs-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>timeTakenMs</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;scala.Tuple2&lt;T,Object&gt;&nbsp;timeTakenMs(scala.Function0&lt;T&gt;&nbsp;body)</pre>
<div class="block">Records the duration of running `body`.</div>
</li>
</ul>
<a name="doFetchFile-java.lang.String-java.io.File-java.lang.String-org.apache.spark.SparkConf-org.apache.hadoop.conf.Configuration-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>doFetchFile</h4>
<pre>public static&nbsp;java.io.File&nbsp;doFetchFile(String&nbsp;url,
java.io.File&nbsp;targetDir,
String&nbsp;filename,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
org.apache.hadoop.conf.Configuration&nbsp;hadoopConf)</pre>
<div class="block">Download a file or directory to target directory. Supports fetching the file in a variety of
ways, including HTTP, Hadoop-compatible filesystems, and files on a standard filesystem, based
on the URL parameter. Fetching directories is only supported from Hadoop-compatible
filesystems.
<p>
Throws SparkException if the target file already exists and has different contents than
the requested file.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>url</code> - (undocumented)</dd>
<dd><code>targetDir</code> - (undocumented)</dd>
<dd><code>filename</code> - (undocumented)</dd>
<dd><code>conf</code> - (undocumented)</dd>
<dd><code>hadoopConf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="validateURL-java.net.URI-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>validateURL</h4>
<pre>public static&nbsp;void&nbsp;validateURL(java.net.URI&nbsp;uri)
throws java.net.MalformedURLException</pre>
<div class="block">Validate that a given URI is actually a valid URL as well.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>uri</code> - The URI to validate</dd>
<dt><span class="throwsLabel">Throws:</span></dt>
<dd><code>java.net.MalformedURLException</code></dd>
</dl>
</li>
</ul>
<a name="getLocalDir-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getLocalDir</h4>
<pre>public static&nbsp;String&nbsp;getLocalDir(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
<div class="block">Get the path of a temporary directory. Spark's local directories can be configured through
multiple settings, which are used with the following precedence:
<p>
- If called from inside of a YARN container, this will return a directory chosen by YARN.
- If the SPARK_LOCAL_DIRS environment variable is set, this will return a directory from it.
- Otherwise, if the spark.local.dir is set, this will return a directory from it.
- Otherwise, this will return java.io.tmpdir.
<p>
Some of these configuration options might be lists of multiple paths, but this method will
always return a single directory. The return directory is chosen randomly from the array
of directories it gets from getOrCreateLocalRootDirs.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="isInRunningSparkTask--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isInRunningSparkTask</h4>
<pre>public static&nbsp;boolean&nbsp;isInRunningSparkTask()</pre>
<div class="block">Returns if the current codes are running in a Spark task, e.g., in executors.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getConfiguredLocalDirs-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getConfiguredLocalDirs</h4>
<pre>public static&nbsp;String[]&nbsp;getConfiguredLocalDirs(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
<div class="block">Return the configured local directories where Spark can write files. This
method does not create any directories on its own, it only encapsulates the
logic of locating the local directories according to deployment mode.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="randomize-scala.collection.TraversableOnce-scala.reflect.ClassTag-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>randomize</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;scala.collection.Seq&lt;T&gt;&nbsp;randomize(scala.collection.TraversableOnce&lt;T&gt;&nbsp;seq,
scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$1)</pre>
<div class="block">Shuffle the elements of a collection into a random order, returning the
result in a new collection. Unlike scala.util.Random.shuffle, this method
uses a local random number generator, avoiding inter-thread contention.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>seq</code> - (undocumented)</dd>
<dd><code>evidence$1</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="randomizeInPlace-java.lang.Object-java.util.Random-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>randomizeInPlace</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;Object&nbsp;randomizeInPlace(Object&nbsp;arr,
java.util.Random&nbsp;rand)</pre>
<div class="block">Shuffle the elements of an array into a random order, modifying the
original array. Returns the original array.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>arr</code> - (undocumented)</dd>
<dd><code>rand</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="setCustomHostname-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setCustomHostname</h4>
<pre>public static&nbsp;void&nbsp;setCustomHostname(String&nbsp;hostname)</pre>
<div class="block">Allow setting a custom host name because when we run on Mesos we need to use the same
hostname it reports to the master.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>hostname</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="localCanonicalHostName--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>localCanonicalHostName</h4>
<pre>public static&nbsp;String&nbsp;localCanonicalHostName()</pre>
<div class="block">Get the local machine's FQDN.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="localHostName--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>localHostName</h4>
<pre>public static&nbsp;String&nbsp;localHostName()</pre>
<div class="block">Get the local machine's hostname.
In case of IPv6, getHostAddress may return '0:0:0:0:0:0:0:1'.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="localHostNameForURI--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>localHostNameForURI</h4>
<pre>public static&nbsp;String&nbsp;localHostNameForURI()</pre>
<div class="block">Get the local machine's URI.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="checkHost-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>checkHost</h4>
<pre>public static&nbsp;void&nbsp;checkHost(String&nbsp;host)</pre>
<div class="block">Checks if the host contains only valid hostname/ip without port
NOTE: In case of an IPv6 IP, it should be enclosed inside []</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>host</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="checkHostPort-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>checkHostPort</h4>
<pre>public static&nbsp;void&nbsp;checkHostPort(String&nbsp;hostPort)</pre>
</li>
</ul>
<a name="parseHostPort-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>parseHostPort</h4>
<pre>public static&nbsp;scala.Tuple2&lt;String,Object&gt;&nbsp;parseHostPort(String&nbsp;hostPort)</pre>
</li>
</ul>
<a name="getUsedTimeNs-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getUsedTimeNs</h4>
<pre>public static&nbsp;String&nbsp;getUsedTimeNs(long&nbsp;startTimeNs)</pre>
<div class="block">Return a string describing how much time has passed, in milliseconds.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>startTimeNs</code> - - a timestamp in nanoseconds returned by <code>System.nanoTime</code>.</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="recursiveList-java.io.File-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>recursiveList</h4>
<pre>public static&nbsp;java.io.File[]&nbsp;recursiveList(java.io.File&nbsp;f)</pre>
<div class="block">Lists files recursively.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>f</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="deleteRecursively-java.io.File-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>deleteRecursively</h4>
<pre>public static&nbsp;void&nbsp;deleteRecursively(java.io.File&nbsp;file)</pre>
<div class="block">Delete a file or directory and its contents recursively.
Don't follow directories if they are symlinks.
Throws an exception if deletion is unsuccessful.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>file</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="doesDirectoryContainAnyNewFiles-java.io.File-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>doesDirectoryContainAnyNewFiles</h4>
<pre>public static&nbsp;boolean&nbsp;doesDirectoryContainAnyNewFiles(java.io.File&nbsp;dir,
long&nbsp;cutoff)</pre>
<div class="block">Determines if a directory contains any files newer than cutoff seconds.
<p></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>dir</code> - must be the path to a directory, or IllegalArgumentException is thrown</dd>
<dd><code>cutoff</code> - measured in seconds. Returns true if there are any files or directories in the
given directory whose last modified time is later than this many seconds ago</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="timeStringAsMs-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>timeStringAsMs</h4>
<pre>public static&nbsp;long&nbsp;timeStringAsMs(String&nbsp;str)</pre>
<div class="block">Convert a time parameter such as (50s, 100ms, or 250us) to milliseconds for internal use. If
no suffix is provided, the passed number is assumed to be in ms.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>str</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="timeStringAsSeconds-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>timeStringAsSeconds</h4>
<pre>public static&nbsp;long&nbsp;timeStringAsSeconds(String&nbsp;str)</pre>
<div class="block">Convert a time parameter such as (50s, 100ms, or 250us) to seconds for internal use. If
no suffix is provided, the passed number is assumed to be in seconds.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>str</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="byteStringAsBytes-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>byteStringAsBytes</h4>
<pre>public static&nbsp;long&nbsp;byteStringAsBytes(String&nbsp;str)</pre>
<div class="block">Convert a passed byte string (e.g. 50b, 100k, or 250m) to bytes for internal use.
<p>
If no suffix is provided, the passed number is assumed to be in bytes.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>str</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="byteStringAsKb-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>byteStringAsKb</h4>
<pre>public static&nbsp;long&nbsp;byteStringAsKb(String&nbsp;str)</pre>
<div class="block">Convert a passed byte string (e.g. 50b, 100k, or 250m) to kibibytes for internal use.
<p>
If no suffix is provided, the passed number is assumed to be in kibibytes.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>str</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="byteStringAsMb-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>byteStringAsMb</h4>
<pre>public static&nbsp;long&nbsp;byteStringAsMb(String&nbsp;str)</pre>
<div class="block">Convert a passed byte string (e.g. 50b, 100k, or 250m) to mebibytes for internal use.
<p>
If no suffix is provided, the passed number is assumed to be in mebibytes.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>str</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="byteStringAsGb-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>byteStringAsGb</h4>
<pre>public static&nbsp;long&nbsp;byteStringAsGb(String&nbsp;str)</pre>
<div class="block">Convert a passed byte string (e.g. 50b, 100k, or 250m, 500g) to gibibytes for internal use.
<p>
If no suffix is provided, the passed number is assumed to be in gibibytes.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>str</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="memoryStringToMb-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>memoryStringToMb</h4>
<pre>public static&nbsp;int&nbsp;memoryStringToMb(String&nbsp;str)</pre>
<div class="block">Convert a Java memory parameter passed to -Xmx (such as 300m or 1g) to a number of mebibytes.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>str</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="bytesToString-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>bytesToString</h4>
<pre>public static&nbsp;String&nbsp;bytesToString(long&nbsp;size)</pre>
<div class="block">Convert a quantity in bytes to a human-readable string such as "4.0 MiB".</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>size</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="bytesToString-scala.math.BigInt-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>bytesToString</h4>
<pre>public static&nbsp;String&nbsp;bytesToString(scala.math.BigInt&nbsp;size)</pre>
</li>
</ul>
<a name="msDurationToString-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>msDurationToString</h4>
<pre>public static&nbsp;String&nbsp;msDurationToString(long&nbsp;ms)</pre>
<div class="block">Returns a human-readable string representing a duration such as "35ms"</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>ms</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="megabytesToString-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>megabytesToString</h4>
<pre>public static&nbsp;String&nbsp;megabytesToString(long&nbsp;megabytes)</pre>
<div class="block">Convert a quantity in megabytes to a human-readable string such as "4.0 MiB".</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>megabytes</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="executeCommand-scala.collection.Seq-java.io.File-scala.collection.Map-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>executeCommand</h4>
<pre>public static&nbsp;Process&nbsp;executeCommand(scala.collection.Seq&lt;String&gt;&nbsp;command,
java.io.File&nbsp;workingDir,
scala.collection.Map&lt;String,String&gt;&nbsp;extraEnvironment,
boolean&nbsp;redirectStderr)</pre>
<div class="block">Execute a command and return the process running the command.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>command</code> - (undocumented)</dd>
<dd><code>workingDir</code> - (undocumented)</dd>
<dd><code>extraEnvironment</code> - (undocumented)</dd>
<dd><code>redirectStderr</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="executeAndGetOutput-scala.collection.Seq-java.io.File-scala.collection.Map-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>executeAndGetOutput</h4>
<pre>public static&nbsp;String&nbsp;executeAndGetOutput(scala.collection.Seq&lt;String&gt;&nbsp;command,
java.io.File&nbsp;workingDir,
scala.collection.Map&lt;String,String&gt;&nbsp;extraEnvironment,
boolean&nbsp;redirectStderr)</pre>
<div class="block">Execute a command and get its output, throwing an exception if it yields a code other than 0.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>command</code> - (undocumented)</dd>
<dd><code>workingDir</code> - (undocumented)</dd>
<dd><code>extraEnvironment</code> - (undocumented)</dd>
<dd><code>redirectStderr</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="processStreamByLine-java.lang.String-java.io.InputStream-scala.Function1-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>processStreamByLine</h4>
<pre>public static&nbsp;Thread&nbsp;processStreamByLine(String&nbsp;threadName,
java.io.InputStream&nbsp;inputStream,
scala.Function1&lt;String,scala.runtime.BoxedUnit&gt;&nbsp;processLine)</pre>
<div class="block">Return and start a daemon thread that processes the content of the input stream line by line.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>threadName</code> - (undocumented)</dd>
<dd><code>inputStream</code> - (undocumented)</dd>
<dd><code>processLine</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="tryOrExit-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>tryOrExit</h4>
<pre>public static&nbsp;void&nbsp;tryOrExit(scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;block)</pre>
<div class="block">Execute a block of code that evaluates to Unit, forwarding any uncaught exceptions to the
default UncaughtExceptionHandler
<p>
NOTE: This method is to be called by the spark-started JVM process.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>block</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="tryOrStopSparkContext-org.apache.spark.SparkContext-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>tryOrStopSparkContext</h4>
<pre>public static&nbsp;void&nbsp;tryOrStopSparkContext(<a href="../../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a>&nbsp;sc,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;block)</pre>
<div class="block">Execute a block of code that evaluates to Unit, stop SparkContext if there is any uncaught
exception
<p>
NOTE: This method is to be called by the driver-side components to avoid stopping the
user-started JVM process completely; in contrast, tryOrExit is to be called in the
spark-started JVM process.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>sc</code> - (undocumented)</dd>
<dd><code>block</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="tryOrIOException-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>tryOrIOException</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;tryOrIOException(scala.Function0&lt;T&gt;&nbsp;block)</pre>
<div class="block">Execute a block of code that returns a value, re-throwing any non-fatal uncaught
exceptions as IOException. This is used when implementing Externalizable and Serializable's
read and write methods, since Java's serializer will not report non-IOExceptions properly;
see SPARK-4080 for more context.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>block</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="tryLogNonFatalError-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>tryLogNonFatalError</h4>
<pre>public static&nbsp;void&nbsp;tryLogNonFatalError(scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;block)</pre>
<div class="block">Executes the given block. Log non-fatal errors if any, and only throw fatal errors</div>
</li>
</ul>
<a name="tryWithSafeFinally-scala.Function0-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>tryWithSafeFinally</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;tryWithSafeFinally(scala.Function0&lt;T&gt;&nbsp;block,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;finallyBlock)</pre>
<div class="block">Execute a block of code, then a finally block, but if exceptions happen in
the finally block, do not suppress the original exception.
<p>
This is primarily an issue with <code>finally { out.close() }</code> blocks, where
close needs to be called to clean up <code>out</code>, but if an exception happened
in <code>out.write</code>, it's likely <code>out</code> may be corrupted and <code>out.close</code> will
fail as well. This would then suppress the original/likely more meaningful
exception from the original <code>out.write</code> call.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>block</code> - (undocumented)</dd>
<dd><code>finallyBlock</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="tryWithSafeFinallyAndFailureCallbacks-scala.Function0-scala.Function0-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>tryWithSafeFinallyAndFailureCallbacks</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;tryWithSafeFinallyAndFailureCallbacks(scala.Function0&lt;T&gt;&nbsp;block,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;catchBlock,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;finallyBlock)</pre>
<div class="block">Execute a block of code and call the failure callbacks in the catch block. If exceptions occur
in either the catch or the finally block, they are appended to the list of suppressed
exceptions in original exception which is then rethrown.
<p>
This is primarily an issue with <code>catch { abort() }</code> or <code>finally { out.close() }</code> blocks,
where the abort/close needs to be called to clean up <code>out</code>, but if an exception happened
in <code>out.write</code>, it's likely <code>out</code> may be corrupted and <code>abort</code> or <code>out.close</code> will
fail as well. This would then suppress the original/likely more meaningful
exception from the original <code>out.write</code> call.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>block</code> - (undocumented)</dd>
<dd><code>catchBlock</code> - (undocumented)</dd>
<dd><code>finallyBlock</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getCallSite-scala.Function1-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getCallSite</h4>
<pre>public static&nbsp;org.apache.spark.util.CallSite&nbsp;getCallSite(scala.Function1&lt;String,Object&gt;&nbsp;skipClass)</pre>
<div class="block">When called inside a class in the spark package, returns the name of the user code class
(outside the spark package) that called into Spark, as well as which Spark method they called.
This is used, for example, to tell users where in their code each RDD got created.
<p></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>skipClass</code> - Function that is used to exclude non-user-code classes.</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getFileLength-java.io.File-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getFileLength</h4>
<pre>public static&nbsp;long&nbsp;getFileLength(java.io.File&nbsp;file,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;workConf)</pre>
<div class="block">Return the file length, if the file is compressed it returns the uncompressed file length.
It also caches the uncompressed file size to avoid repeated decompression. The cache size is
read from <code>workConf</code>.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>file</code> - (undocumented)</dd>
<dd><code>workConf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="offsetBytes-java.lang.String-long-long-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>offsetBytes</h4>
<pre>public static&nbsp;String&nbsp;offsetBytes(String&nbsp;path,
long&nbsp;length,
long&nbsp;start,
long&nbsp;end)</pre>
<div class="block">Return a string containing part of a file from byte 'start' to 'end'.</div>
</li>
</ul>
<a name="offsetBytes-scala.collection.Seq-scala.collection.Seq-long-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>offsetBytes</h4>
<pre>public static&nbsp;String&nbsp;offsetBytes(scala.collection.Seq&lt;java.io.File&gt;&nbsp;files,
scala.collection.Seq&lt;Object&gt;&nbsp;fileLengths,
long&nbsp;start,
long&nbsp;end)</pre>
<div class="block">Return a string containing data across a set of files. The <code>startIndex</code>
and <code>endIndex</code> are based on the cumulative size of all the files taken in
the given order. See figure below for more details.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>files</code> - (undocumented)</dd>
<dd><code>fileLengths</code> - (undocumented)</dd>
<dd><code>start</code> - (undocumented)</dd>
<dd><code>end</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="clone-java.lang.Object-org.apache.spark.serializer.SerializerInstance-scala.reflect.ClassTag-">
<!-- -->
</a><a name="clone-T-org.apache.spark.serializer.SerializerInstance-scala.reflect.ClassTag-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>clone</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;clone(T&nbsp;value,
<a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a>&nbsp;serializer,
scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$2)</pre>
<div class="block">Clone an object using a Spark serializer.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>value</code> - (undocumented)</dd>
<dd><code>serializer</code> - (undocumented)</dd>
<dd><code>evidence$2</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="splitCommandString-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>splitCommandString</h4>
<pre>public static&nbsp;scala.collection.Seq&lt;String&gt;&nbsp;splitCommandString(String&nbsp;s)</pre>
<div class="block">Split a string of potentially quoted arguments from the command line the way that a shell
would do it to determine arguments to a command. For example, if the string is 'a "b c" d',
then it would be parsed as three arguments: 'a', 'b c' and 'd'.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>s</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="nonNegativeMod-int-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>nonNegativeMod</h4>
<pre>public static&nbsp;int&nbsp;nonNegativeMod(int&nbsp;x,
int&nbsp;mod)</pre>
</li>
</ul>
<a name="nonNegativeHash-java.lang.Object-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>nonNegativeHash</h4>
<pre>public static&nbsp;int&nbsp;nonNegativeHash(Object&nbsp;obj)</pre>
</li>
</ul>
<a name="getSystemProperties--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getSystemProperties</h4>
<pre>public static&nbsp;scala.collection.Map&lt;String,String&gt;&nbsp;getSystemProperties()</pre>
<div class="block">Returns the system properties map that is thread-safe to iterate over. It gets the
properties which have been set explicitly, as well as those for which only a default value
has been defined.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="times-int-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>times</h4>
<pre>public static&nbsp;void&nbsp;times(int&nbsp;numIters,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;f)</pre>
<div class="block">Method executed for repeating a task for side effects.
Unlike a for comprehension, it permits JVM JIT optimization</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>numIters</code> - (undocumented)</dd>
<dd><code>f</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="timeIt-int-scala.Function0-scala.Option-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>timeIt</h4>
<pre>public static&nbsp;long&nbsp;timeIt(int&nbsp;numIters,
scala.Function0&lt;scala.runtime.BoxedUnit&gt;&nbsp;f,
scala.Option&lt;scala.Function0&lt;scala.runtime.BoxedUnit&gt;&gt;&nbsp;prepare)</pre>
<div class="block">Timing method based on iterations that permit JVM JIT optimization.
<p></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>numIters</code> - number of iterations</dd>
<dd><code>f</code> - function to be executed. If prepare is not None, the running time of each call to f
must be an order of magnitude longer than one nanosecond for accurate timing.</dd>
<dd><code>prepare</code> - function to be executed before each call to f. Its running time doesn't count.</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>the total time across all iterations (not counting preparation time) in nanoseconds.</dd>
</dl>
</li>
</ul>
<a name="getIteratorSize-scala.collection.Iterator-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getIteratorSize</h4>
<pre>public static&nbsp;long&nbsp;getIteratorSize(scala.collection.Iterator&lt;?&gt;&nbsp;iterator)</pre>
<div class="block">Counts the number of elements of an iterator.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>iterator</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getIteratorZipWithIndex-scala.collection.Iterator-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getIteratorZipWithIndex</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;scala.collection.Iterator&lt;scala.Tuple2&lt;T,Object&gt;&gt;&nbsp;getIteratorZipWithIndex(scala.collection.Iterator&lt;T&gt;&nbsp;iter,
long&nbsp;startIndex)</pre>
<div class="block">Generate a zipWithIndex iterator, avoiding the index value overflow problem
 in Scala's zipWithIndex</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>iter</code> - (undocumented)</dd>
<dd><code>startIndex</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="symlink-java.io.File-java.io.File-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>symlink</h4>
<pre>public static&nbsp;void&nbsp;symlink(java.io.File&nbsp;src,
java.io.File&nbsp;dst)</pre>
<div class="block">Creates a symlink.
<p></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>src</code> - absolute path to the source</dd>
<dd><code>dst</code> - relative path for the destination</dd>
</dl>
</li>
</ul>
<a name="getFormattedClassName-java.lang.Object-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getFormattedClassName</h4>
<pre>public static&nbsp;String&nbsp;getFormattedClassName(Object&nbsp;obj)</pre>
<div class="block">Return the class name of the given object, removing all dollar signs</div>
</li>
</ul>
<a name="getHadoopFileSystem-java.net.URI-org.apache.hadoop.conf.Configuration-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getHadoopFileSystem</h4>
<pre>public static&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;getHadoopFileSystem(java.net.URI&nbsp;path,
org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
<div class="block">Return a Hadoop FileSystem with the scheme encoded in the given path.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>path</code> - (undocumented)</dd>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getHadoopFileSystem-java.lang.String-org.apache.hadoop.conf.Configuration-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getHadoopFileSystem</h4>
<pre>public static&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;getHadoopFileSystem(String&nbsp;path,
org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
<div class="block">Return a Hadoop FileSystem with the scheme encoded in the given path.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>path</code> - (undocumented)</dd>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="isWindows--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isWindows</h4>
<pre>public static&nbsp;boolean&nbsp;isWindows()</pre>
<div class="block">Whether the underlying operating system is Windows.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="isMac--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isMac</h4>
<pre>public static&nbsp;boolean&nbsp;isMac()</pre>
<div class="block">Whether the underlying operating system is Mac OS X.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="isMacOnAppleSilicon--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isMacOnAppleSilicon</h4>
<pre>public static&nbsp;boolean&nbsp;isMacOnAppleSilicon()</pre>
<div class="block">Whether the underlying operating system is Mac OS X and processor is Apple Silicon.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="preferIPv6--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>preferIPv6</h4>
<pre>public static&nbsp;boolean&nbsp;preferIPv6()</pre>
<div class="block">Whether the underlying JVM prefers IPv6 addresses.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="windowsDrive--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>windowsDrive</h4>
<pre>public static&nbsp;scala.util.matching.Regex&nbsp;windowsDrive()</pre>
<div class="block">Pattern for matching a Windows drive, which contains only a single alphabetic character.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="isTesting--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isTesting</h4>
<pre>public static&nbsp;boolean&nbsp;isTesting()</pre>
<div class="block">Indicates whether Spark is currently running unit tests.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="terminateProcess-java.lang.Process-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>terminateProcess</h4>
<pre>public static&nbsp;scala.Option&lt;Object&gt;&nbsp;terminateProcess(Process&nbsp;process,
long&nbsp;timeoutMs)</pre>
<div class="block">Terminates a process waiting for at most the specified duration.
<p></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>process</code> - (undocumented)</dd>
<dd><code>timeoutMs</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>the process exit value if it was successfully terminated, else None</dd>
</dl>
</li>
</ul>
<a name="getStderr-java.lang.Process-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getStderr</h4>
<pre>public static&nbsp;scala.Option&lt;String&gt;&nbsp;getStderr(Process&nbsp;process,
long&nbsp;timeoutMs)</pre>
<div class="block">Return the stderr of a process after waiting for the process to terminate.
If the process does not terminate within the specified timeout, return None.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>process</code> - (undocumented)</dd>
<dd><code>timeoutMs</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="logUncaughtExceptions-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>logUncaughtExceptions</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;logUncaughtExceptions(scala.Function0&lt;T&gt;&nbsp;f)</pre>
<div class="block">Execute the given block, logging and re-throwing any uncaught exception.
This is particularly useful for wrapping code that runs in a thread, to ensure
that exceptions are printed, and to avoid having to catch Throwable.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>f</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="tryLog-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>tryLog</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;scala.util.Try&lt;T&gt;&nbsp;tryLog(scala.Function0&lt;T&gt;&nbsp;f)</pre>
<div class="block">Executes the given block in a Try, logging any uncaught exceptions.</div>
</li>
</ul>
<a name="isFatalError-java.lang.Throwable-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isFatalError</h4>
<pre>public static&nbsp;boolean&nbsp;isFatalError(Throwable&nbsp;e)</pre>
<div class="block">Returns true if the given exception was fatal. See docs for scala.util.control.NonFatal.</div>
</li>
</ul>
<a name="resolveURI-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>resolveURI</h4>
<pre>public static&nbsp;java.net.URI&nbsp;resolveURI(String&nbsp;path)</pre>
<div class="block">Return a well-formed URI for the file described by a user input string.
<p>
If the supplied path does not contain a scheme, or is a relative path, it will be
converted into an absolute path with a file:// scheme.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>path</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="resolveURIs-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>resolveURIs</h4>
<pre>public static&nbsp;String&nbsp;resolveURIs(String&nbsp;paths)</pre>
<div class="block">Resolve a comma-separated list of paths.</div>
</li>
</ul>
<a name="isAbsoluteURI-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isAbsoluteURI</h4>
<pre>public static&nbsp;boolean&nbsp;isAbsoluteURI(String&nbsp;path)</pre>
<div class="block">Check whether a path is an absolute URI.</div>
</li>
</ul>
<a name="nonLocalPaths-java.lang.String-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>nonLocalPaths</h4>
<pre>public static&nbsp;String[]&nbsp;nonLocalPaths(String&nbsp;paths,
boolean&nbsp;testWindows)</pre>
<div class="block">Return all non-local paths from a comma-separated list of paths.</div>
</li>
</ul>
<a name="loadDefaultSparkProperties-org.apache.spark.SparkConf-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>loadDefaultSparkProperties</h4>
<pre>public static&nbsp;String&nbsp;loadDefaultSparkProperties(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
String&nbsp;filePath)</pre>
<div class="block">Load default Spark properties from the given file. If no file is provided,
use the common defaults file. This mutates state in the given SparkConf and
in this JVM's system properties if the config specified in the file is not
already set. Return the path of the properties file used.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dd><code>filePath</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="updateSparkConfigFromProperties-org.apache.spark.SparkConf-scala.collection.Map-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>updateSparkConfigFromProperties</h4>
<pre>public static&nbsp;void&nbsp;updateSparkConfigFromProperties(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
scala.collection.Map&lt;String,String&gt;&nbsp;properties)</pre>
<div class="block">Updates Spark config with properties from a set of Properties.
Provided properties have the highest priority.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dd><code>properties</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="getPropertiesFromFile-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getPropertiesFromFile</h4>
<pre>public static&nbsp;scala.collection.Map&lt;String,String&gt;&nbsp;getPropertiesFromFile(String&nbsp;filename)</pre>
<div class="block">Load properties present in the given file.</div>
</li>
</ul>
<a name="getDefaultPropertiesFile-scala.collection.Map-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getDefaultPropertiesFile</h4>
<pre>public static&nbsp;String&nbsp;getDefaultPropertiesFile(scala.collection.Map&lt;String,String&gt;&nbsp;env)</pre>
<div class="block">Return the path of the default Spark properties file.</div>
</li>
</ul>
<a name="exceptionString-java.lang.Throwable-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>exceptionString</h4>
<pre>public static&nbsp;String&nbsp;exceptionString(Throwable&nbsp;e)</pre>
<div class="block">Return a nice string representation of the exception. It will call "printStackTrace" to
recursively generate the stack trace including the exception and its causes.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>e</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getThreadDump--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getThreadDump</h4>
<pre>public static&nbsp;<a href="../../../../org/apache/spark/status/api/v1/ThreadStackTrace.html" title="class in org.apache.spark.status.api.v1">ThreadStackTrace</a>[]&nbsp;getThreadDump()</pre>
<div class="block">Return a thread dump of all threads' stacktraces. Used to capture dumps for the web UI</div>
</li>
</ul>
<a name="getThreadDumpForThread-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getThreadDumpForThread</h4>
<pre>public static&nbsp;scala.Option&lt;<a href="../../../../org/apache/spark/status/api/v1/ThreadStackTrace.html" title="class in org.apache.spark.status.api.v1">ThreadStackTrace</a>&gt;&nbsp;getThreadDumpForThread(long&nbsp;threadId)</pre>
</li>
</ul>
<a name="sparkJavaOpts-org.apache.spark.SparkConf-scala.Function1-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>sparkJavaOpts</h4>
<pre>public static&nbsp;scala.collection.Seq&lt;String&gt;&nbsp;sparkJavaOpts(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
scala.Function1&lt;String,Object&gt;&nbsp;filterKey)</pre>
<div class="block">Convert all spark properties set in the given SparkConf to a sequence of java options.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dd><code>filterKey</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="portMaxRetries-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>portMaxRetries</h4>
<pre>public static&nbsp;int&nbsp;portMaxRetries(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
<div class="block">Maximum number of retries when binding to a port before giving up.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="userPort-int-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>userPort</h4>
<pre>public static&nbsp;int&nbsp;userPort(int&nbsp;base,
int&nbsp;offset)</pre>
<div class="block">Returns the user port to try when trying to bind a service. Handles wrapping and skipping
privileged ports.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>base</code> - (undocumented)</dd>
<dd><code>offset</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="startServiceOnPort-int-scala.Function1-org.apache.spark.SparkConf-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>startServiceOnPort</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;scala.Tuple2&lt;T,Object&gt;&nbsp;startServiceOnPort(int&nbsp;startPort,
scala.Function1&lt;Object,scala.Tuple2&lt;T,Object&gt;&gt;&nbsp;startService,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
String&nbsp;serviceName)</pre>
<div class="block">Attempt to start a service on the given port, or fail after a number of attempts.
Each subsequent attempt uses 1 + the port used in the previous attempt (unless the port is 0).
<p></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>startPort</code> - The initial port to start the service on.</dd>
<dd><code>startService</code> - Function to start service on a given port.
This is expected to throw java.net.BindException on port collision.</dd>
<dd><code>conf</code> - A SparkConf used to get the maximum number of retries when binding to a port.</dd>
<dd><code>serviceName</code> - Name of the service.</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(service: T, port: Int)</dd>
</dl>
</li>
</ul>
<a name="isBindCollision-java.lang.Throwable-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isBindCollision</h4>
<pre>public static&nbsp;boolean&nbsp;isBindCollision(Throwable&nbsp;exception)</pre>
<div class="block">Return whether the exception is caused by an address-port collision when binding.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>exception</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="setLogLevel-org.apache.logging.log4j.Level-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setLogLevel</h4>
<pre>public static&nbsp;void&nbsp;setLogLevel(org.apache.logging.log4j.Level&nbsp;l)</pre>
<div class="block">configure a new log4j level</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>l</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="libraryPathEnvName--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>libraryPathEnvName</h4>
<pre>public static&nbsp;String&nbsp;libraryPathEnvName()</pre>
<div class="block">Return the current system LD_LIBRARY_PATH name</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="libraryPathEnvPrefix-scala.collection.Seq-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>libraryPathEnvPrefix</h4>
<pre>public static&nbsp;String&nbsp;libraryPathEnvPrefix(scala.collection.Seq&lt;String&gt;&nbsp;libraryPaths)</pre>
<div class="block">Return the prefix of a command that appends the given library paths to the
system-specific library path environment variable. On Unix, for instance,
this returns the string LD_LIBRARY_PATH="path1:path2:$LD_LIBRARY_PATH".</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>libraryPaths</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getSparkOrYarnConfig-org.apache.spark.SparkConf-java.lang.String-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getSparkOrYarnConfig</h4>
<pre>public static&nbsp;String&nbsp;getSparkOrYarnConfig(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
String&nbsp;key,
String&nbsp;default_)</pre>
<div class="block">Return the value of a config either through the SparkConf or the Hadoop configuration.
 We check whether the key is set in the SparkConf before looking at any Hadoop configuration.
If the key is set in SparkConf, no matter whether it is running on YARN or not,
gets the value from SparkConf.
Only when the key is not set in SparkConf and running on YARN,
gets the value from Hadoop configuration.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dd><code>key</code> - (undocumented)</dd>
<dd><code>default_</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="extractHostPortFromSparkUrl-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>extractHostPortFromSparkUrl</h4>
<pre>public static&nbsp;scala.Tuple2&lt;String,Object&gt;&nbsp;extractHostPortFromSparkUrl(String&nbsp;sparkUrl)
throws <a href="../../../../org/apache/spark/SparkException.html" title="class in org.apache.spark">SparkException</a></pre>
<div class="block">Return a pair of host and port extracted from the <code>sparkUrl</code>.
<p>
 A spark url (<code>spark://host:port</code>) is a special URI whose scheme is <code>spark</code> and which only
 contains a host and a port.
<p></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>sparkUrl</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
<dt><span class="throwsLabel">Throws:</span></dt>
<dd><code><a href="../../../../org/apache/spark/SparkException.html" title="class in org.apache.spark">SparkException</a></code> - if sparkUrl is invalid.</dd>
</dl>
</li>
</ul>
<a name="getCurrentUserName--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getCurrentUserName</h4>
<pre>public static&nbsp;String&nbsp;getCurrentUserName()</pre>
<div class="block">Returns the current user name. This is the currently logged in user, unless that's been
overridden by the <code>SPARK_USER</code> environment variable.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="EMPTY_USER_GROUPS--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>EMPTY_USER_GROUPS</h4>
<pre>public static&nbsp;scala.collection.immutable.Set&lt;String&gt;&nbsp;EMPTY_USER_GROUPS()</pre>
</li>
</ul>
<a name="getCurrentUserGroups-org.apache.spark.SparkConf-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getCurrentUserGroups</h4>
<pre>public static&nbsp;scala.collection.immutable.Set&lt;String&gt;&nbsp;getCurrentUserGroups(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;sparkConf,
String&nbsp;username)</pre>
</li>
</ul>
<a name="parseStandaloneMasterUrls-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>parseStandaloneMasterUrls</h4>
<pre>public static&nbsp;String[]&nbsp;parseStandaloneMasterUrls(String&nbsp;masterUrls)</pre>
<div class="block">Split the comma delimited string of master URLs into a list.
For instance, "spark://abc,def" becomes [spark://abc, spark://def].</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>masterUrls</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="BACKUP_STANDALONE_MASTER_PREFIX--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>BACKUP_STANDALONE_MASTER_PREFIX</h4>
<pre>public static&nbsp;String&nbsp;BACKUP_STANDALONE_MASTER_PREFIX()</pre>
<div class="block">An identifier that backup masters use in their responses.</div>
</li>
</ul>
<a name="responseFromBackup-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>responseFromBackup</h4>
<pre>public static&nbsp;boolean&nbsp;responseFromBackup(String&nbsp;msg)</pre>
<div class="block">Return true if the response message is sent from a backup Master on standby.</div>
</li>
</ul>
<a name="withDummyCallSite-org.apache.spark.SparkContext-scala.Function0-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>withDummyCallSite</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;withDummyCallSite(<a href="../../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a>&nbsp;sc,
scala.Function0&lt;T&gt;&nbsp;body)</pre>
<div class="block">To avoid calling <code>Utils.getCallSite</code> for every single RDD we create in the body,
set a dummy call site that RDDs use instead. This is for performance optimization.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>sc</code> - (undocumented)</dd>
<dd><code>body</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="isInDirectory-java.io.File-java.io.File-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isInDirectory</h4>
<pre>public static&nbsp;boolean&nbsp;isInDirectory(java.io.File&nbsp;parent,
java.io.File&nbsp;child)</pre>
<div class="block">Return whether the specified file is a parent directory of the child file.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>parent</code> - (undocumented)</dd>
<dd><code>child</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="isLocalMaster-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isLocalMaster</h4>
<pre>public static&nbsp;boolean&nbsp;isLocalMaster(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>whether it is local mode</dd>
</dl>
</li>
</ul>
<a name="isPushBasedShuffleEnabled-org.apache.spark.SparkConf-boolean-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isPushBasedShuffleEnabled</h4>
<pre>public static&nbsp;boolean&nbsp;isPushBasedShuffleEnabled(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
boolean&nbsp;isDriver,
boolean&nbsp;checkSerializer)</pre>
<div class="block">Push based shuffle can only be enabled when below conditions are met:
- the application is submitted to run in YARN mode
- external shuffle service enabled
- IO encryption disabled
- serializer(such as KryoSerializer) supports relocation of serialized objects</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dd><code>isDriver</code> - (undocumented)</dd>
<dd><code>checkSerializer</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="instantiateSerializerOrShuffleManager-java.lang.String-org.apache.spark.SparkConf-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>instantiateSerializerOrShuffleManager</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;instantiateSerializerOrShuffleManager(String&nbsp;className,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
boolean&nbsp;isDriver)</pre>
</li>
</ul>
<a name="instantiateSerializerFromConf-org.apache.spark.internal.config.ConfigEntry-org.apache.spark.SparkConf-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>instantiateSerializerFromConf</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;T&nbsp;instantiateSerializerFromConf(org.apache.spark.internal.config.ConfigEntry&lt;String&gt;&nbsp;propertyName,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
boolean&nbsp;isDriver)</pre>
</li>
</ul>
<a name="isDynamicAllocationEnabled-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isDynamicAllocationEnabled</h4>
<pre>public static&nbsp;boolean&nbsp;isDynamicAllocationEnabled(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
<div class="block">Return whether dynamic allocation is enabled in the given conf.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="isStreamingDynamicAllocationEnabled-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isStreamingDynamicAllocationEnabled</h4>
<pre>public static&nbsp;boolean&nbsp;isStreamingDynamicAllocationEnabled(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
</li>
</ul>
<a name="getDynamicAllocationInitialExecutors-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getDynamicAllocationInitialExecutors</h4>
<pre>public static&nbsp;int&nbsp;getDynamicAllocationInitialExecutors(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
<div class="block">Return the initial number of executors for dynamic allocation.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="tryWithResource-scala.Function0-scala.Function1-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>tryWithResource</h4>
<pre>public static&nbsp;&lt;R extends java.io.Closeable,T&gt;&nbsp;T&nbsp;tryWithResource(scala.Function0&lt;R&gt;&nbsp;createResource,
scala.Function1&lt;R,T&gt;&nbsp;f)</pre>
</li>
</ul>
<a name="tempFileWith-java.io.File-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>tempFileWith</h4>
<pre>public static&nbsp;java.io.File&nbsp;tempFileWith(java.io.File&nbsp;path)</pre>
<div class="block">Returns a path of temporary file which is in the same directory with <code>path</code>.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>path</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getProcessName--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getProcessName</h4>
<pre>public static&nbsp;String&nbsp;getProcessName()</pre>
<div class="block">Returns the name of this JVM process. This is OS dependent, but on typical platforms
 (OS X, Linux, Windows) it is formatted as PID@hostname.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="initDaemon-org.slf4j.Logger-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>initDaemon</h4>
<pre>public static&nbsp;void&nbsp;initDaemon(org.slf4j.Logger&nbsp;log)</pre>
<div class="block">Utility function that should be called early in <code>main()</code> for daemons to set up some common
diagnostic state.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>log</code> - (undocumented)</dd>
</dl>
</li>
</ul>
<a name="getUserJars-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getUserJars</h4>
<pre>public static&nbsp;scala.collection.Seq&lt;String&gt;&nbsp;getUserJars(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
<div class="block">Return the jar files pointed to by the "spark.jars" property. Spark internally will distribute
these jars through file server. In the YARN mode, it will return an empty list, since YARN
has its own mechanism to distribute jars.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getLocalUserJarsForShell-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getLocalUserJarsForShell</h4>
<pre>public static&nbsp;scala.collection.Seq&lt;String&gt;&nbsp;getLocalUserJarsForShell(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
<div class="block">Return the local jar files which will be added to REPL's classpath. These jar files are
specified by --jars (spark.jars) or --packages; remote jars are first downloaded to the local
machine by SparkSubmit.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="redact-org.apache.spark.SparkConf-scala.collection.Seq-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>redact</h4>
<pre>public static&nbsp;scala.collection.Seq&lt;scala.Tuple2&lt;String,String&gt;&gt;&nbsp;redact(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
scala.collection.Seq&lt;scala.Tuple2&lt;String,String&gt;&gt;&nbsp;kvs)</pre>
<div class="block">Redact the sensitive values in the given map. If a map key matches the redaction pattern then
its value is replaced with a dummy text.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>conf</code> - (undocumented)</dd>
<dd><code>kvs</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="redact-scala.Option-scala.collection.Seq-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>redact</h4>
<pre>public static&nbsp;&lt;K,V&gt;&nbsp;scala.collection.Seq&lt;scala.Tuple2&lt;K,V&gt;&gt;&nbsp;redact(scala.Option&lt;scala.util.matching.Regex&gt;&nbsp;regex,
scala.collection.Seq&lt;scala.Tuple2&lt;K,V&gt;&gt;&nbsp;kvs)</pre>
<div class="block">Redact the sensitive values in the given map. If a map key matches the redaction pattern then
its value is replaced with a dummy text.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>regex</code> - (undocumented)</dd>
<dd><code>kvs</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="redact-scala.Option-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>redact</h4>
<pre>public static&nbsp;String&nbsp;redact(scala.Option&lt;scala.util.matching.Regex&gt;&nbsp;regex,
String&nbsp;text)</pre>
<div class="block">Redact the sensitive information in the given string.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>regex</code> - (undocumented)</dd>
<dd><code>text</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="redact-scala.collection.Map-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>redact</h4>
<pre>public static&nbsp;scala.collection.Seq&lt;scala.Tuple2&lt;String,String&gt;&gt;&nbsp;redact(scala.collection.Map&lt;String,String&gt;&nbsp;kvs)</pre>
<div class="block">Looks up the redaction regex from within the key value pairs and uses it to redact the rest
of the key value pairs. No care is taken to make sure the redaction property itself is not
redacted. So theoretically, the property itself could be configured to redact its own value
when printing.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>kvs</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="redactCommandLineArgs-org.apache.spark.SparkConf-scala.collection.Seq-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>redactCommandLineArgs</h4>
<pre>public static&nbsp;scala.collection.Seq&lt;String&gt;&nbsp;redactCommandLineArgs(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf,
scala.collection.Seq&lt;String&gt;&nbsp;commands)</pre>
</li>
</ul>
<a name="stringToSeq-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>stringToSeq</h4>
<pre>public static&nbsp;scala.collection.Seq&lt;String&gt;&nbsp;stringToSeq(String&nbsp;str)</pre>
</li>
</ul>
<a name="loadExtensions-java.lang.Class-scala.collection.Seq-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>loadExtensions</h4>
<pre>public static&nbsp;&lt;T&gt;&nbsp;scala.collection.Seq&lt;T&gt;&nbsp;loadExtensions(Class&lt;T&gt;&nbsp;extClass,
scala.collection.Seq&lt;String&gt;&nbsp;classes,
<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
<div class="block">Create instances of extension classes.
<p>
The classes in the given list must:
- Be sub-classes of the given base class.
- Provide either a no-arg constructor, or a 1-arg constructor that takes a SparkConf.
<p>
The constructors are allowed to throw "UnsupportedOperationException" if the extension does not
want to be registered; this allows the implementations to check the Spark configuration (or
other state) and decide they do not need to be added. A log message is printed in that case.
Other exceptions are bubbled up.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>extClass</code> - (undocumented)</dd>
<dd><code>classes</code> - (undocumented)</dd>
<dd><code>conf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="checkAndGetK8sMasterUrl-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>checkAndGetK8sMasterUrl</h4>
<pre>public static&nbsp;String&nbsp;checkAndGetK8sMasterUrl(String&nbsp;rawMasterURL)</pre>
<div class="block">Check the validity of the given Kubernetes master URL and return the resolved URL. Prefix
"k8s://" is appended to the resolved URL as the prefix is used by KubernetesClusterManager
in canCreate to determine if the KubernetesClusterManager should be used.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>rawMasterURL</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="substituteAppNExecIds-java.lang.String-java.lang.String-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>substituteAppNExecIds</h4>
<pre>public static&nbsp;String&nbsp;substituteAppNExecIds(String&nbsp;opt,
String&nbsp;appId,
String&nbsp;execId)</pre>
<div class="block">Replaces all the {{EXECUTOR_ID}} occurrences with the Executor Id
and {{APP_ID}} occurrences with the App Id.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>opt</code> - (undocumented)</dd>
<dd><code>appId</code> - (undocumented)</dd>
<dd><code>execId</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="substituteAppId-java.lang.String-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>substituteAppId</h4>
<pre>public static&nbsp;String&nbsp;substituteAppId(String&nbsp;opt,
String&nbsp;appId)</pre>
<div class="block">Replaces all the {{APP_ID}} occurrences with the App Id.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>opt</code> - (undocumented)</dd>
<dd><code>appId</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="createSecret-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>createSecret</h4>
<pre>public static&nbsp;String&nbsp;createSecret(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
</li>
</ul>
<a name="isMemberClass-java.lang.Class-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isMemberClass</h4>
<pre>public static&nbsp;boolean&nbsp;isMemberClass(Class&lt;?&gt;&nbsp;cls)</pre>
<div class="block">Returns true if and only if the underlying class is a member class.
<p>
Note: jdk8u throws a "Malformed class name" error if a given class is a deeply-nested
inner class (See SPARK-34607 for details). This issue has already been fixed in jdk9+, so
we can remove this helper method safely if we drop the support of jdk8u.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>cls</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="getSimpleName-java.lang.Class-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getSimpleName</h4>
<pre>public static&nbsp;String&nbsp;getSimpleName(Class&lt;?&gt;&nbsp;cls)</pre>
<div class="block">Safer than Class obj's getSimpleName which may throw Malformed class name error in scala.
This method mimics scalatest's getSimpleNameOfAnObjectsClass.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>cls</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="stripDollars-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>stripDollars</h4>
<pre>public static&nbsp;String&nbsp;stripDollars(String&nbsp;s)</pre>
<div class="block">Remove trailing dollar signs from a qualified class name,
and return the part following the last remaining dollar sign, if any</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>s</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="stringHalfWidth-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>stringHalfWidth</h4>
<pre>public static&nbsp;int&nbsp;stringHalfWidth(String&nbsp;str)</pre>
<div class="block">Return the number of half widths in a given string. Note that a full width character
occupies two half widths.
<p>
For a string consisting of 1 million characters, the execution of this method requires
about 50ms.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>str</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="sanitizeDirName-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>sanitizeDirName</h4>
<pre>public static&nbsp;String&nbsp;sanitizeDirName(String&nbsp;str)</pre>
</li>
</ul>
<a name="isClientMode-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isClientMode</h4>
<pre>public static&nbsp;boolean&nbsp;isClientMode(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
</li>
</ul>
<a name="isLocalUri-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isLocalUri</h4>
<pre>public static&nbsp;boolean&nbsp;isLocalUri(String&nbsp;uri)</pre>
<div class="block">Returns whether the URI is a "local:" URI.</div>
</li>
</ul>
<a name="isFileSplittable-org.apache.hadoop.fs.Path-org.apache.hadoop.io.compress.CompressionCodecFactory-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isFileSplittable</h4>
<pre>public static&nbsp;boolean&nbsp;isFileSplittable(org.apache.hadoop.fs.Path&nbsp;path,
org.apache.hadoop.io.compress.CompressionCodecFactory&nbsp;codecFactory)</pre>
<div class="block">Check whether the file of the path is splittable.</div>
</li>
</ul>
<a name="cloneProperties-java.util.Properties-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>cloneProperties</h4>
<pre>public static&nbsp;java.util.Properties&nbsp;cloneProperties(java.util.Properties&nbsp;props)</pre>
<div class="block">Create a new properties object with the same values as `props`</div>
</li>
</ul>
<a name="buildLocationMetadata-scala.collection.Seq-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>buildLocationMetadata</h4>
<pre>public static&nbsp;String&nbsp;buildLocationMetadata(scala.collection.Seq&lt;org.apache.hadoop.fs.Path&gt;&nbsp;paths,
int&nbsp;stopAppendingThreshold)</pre>
<div class="block">Convert a sequence of <code>Path</code>s to a metadata string. When the length of metadata string
exceeds <code>stopAppendingThreshold</code>, stop appending paths for saving memory.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>paths</code> - (undocumented)</dd>
<dd><code>stopAppendingThreshold</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="executorOffHeapMemorySizeAsMb-org.apache.spark.SparkConf-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>executorOffHeapMemorySizeAsMb</h4>
<pre>public static&nbsp;int&nbsp;executorOffHeapMemorySizeAsMb(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;sparkConf)</pre>
<div class="block">Convert MEMORY_OFFHEAP_SIZE to MB Unit, return 0 if MEMORY_OFFHEAP_ENABLED is false.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>sparkConf</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="checkOffHeapEnabled-org.apache.spark.SparkConf-long-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>checkOffHeapEnabled</h4>
<pre>public static&nbsp;long&nbsp;checkOffHeapEnabled(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;sparkConf,
long&nbsp;offHeapSize)</pre>
<div class="block">Return 0 if MEMORY_OFFHEAP_ENABLED is false.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>sparkConf</code> - (undocumented)</dd>
<dd><code>offHeapSize</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="createFailedToGetTokenMessage-java.lang.String-java.lang.Throwable-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>createFailedToGetTokenMessage</h4>
<pre>public static&nbsp;String&nbsp;createFailedToGetTokenMessage(String&nbsp;serviceName,
Throwable&nbsp;e)</pre>
<div class="block">Returns a string message about delegation token generation failure</div>
</li>
</ul>
<a name="unzipFilesFromFile-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.io.File-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>unzipFilesFromFile</h4>
<pre>public static&nbsp;scala.collection.Seq&lt;java.io.File&gt;&nbsp;unzipFilesFromFile(org.apache.hadoop.fs.FileSystem&nbsp;fs,
org.apache.hadoop.fs.Path&nbsp;dfsZipFile,
java.io.File&nbsp;localDir)</pre>
<div class="block">Decompress a zip file into a local dir. File names are read from the zip file. Note, we skip
addressing the directory here. Also, we rely on the caller side to address any exceptions.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>fs</code> - (undocumented)</dd>
<dd><code>dfsZipFile</code> - (undocumented)</dd>
<dd><code>localDir</code> - (undocumented)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="median-long:A-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>median</h4>
<pre>public static&nbsp;long&nbsp;median(long[]&nbsp;sizes,
boolean&nbsp;alreadySorted)</pre>
<div class="block">Return the median value of a long array
<p></div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>sizes</code> - </dd>
<dd><code>alreadySorted</code> - </dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>(undocumented)</dd>
</dl>
</li>
</ul>
<a name="isG1GC--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isG1GC</h4>
<pre>public static&nbsp;boolean&nbsp;isG1GC()</pre>
</li>
</ul>
<a name="org:Dapache:Dspark:Dinternal:DLogging:D:Dlog_--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>org$apache$spark$internal$Logging$$log_</h4>
<pre>public static&nbsp;org.slf4j.Logger&nbsp;org$apache$spark$internal$Logging$$log_()</pre>
</li>
</ul>
<a name="org:Dapache:Dspark:Dinternal:DLogging:D:Dlog__:Deq-org.slf4j.Logger-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>org$apache$spark$internal$Logging$$log__$eq</h4>
<pre>public static&nbsp;void&nbsp;org$apache$spark$internal$Logging$$log__$eq(org.slf4j.Logger&nbsp;x$1)</pre>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/apache/spark/util/ThreadUtils.html" title="class in org.apache.spark.util"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
<li><a href="../../../../org/apache/spark/util/VersionUtils.html" title="class in org.apache.spark.util"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/apache/spark/util/Utils.html" target="_top">Frames</a></li>
<li><a href="Utils.html" target="_top">No&nbsp;Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary:&nbsp;</li>
<li>Nested&nbsp;|&nbsp;</li>
<li>Field&nbsp;|&nbsp;</li>
<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail:&nbsp;</li>
<li>Field&nbsp;|&nbsp;</li>
<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<script defer="defer" type="text/javascript" src="../../../../lib/jquery.js"></script><script defer="defer" type="text/javascript" src="../../../../lib/api-javadocs.js"></script></body>
</html>