| <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> |
| <!-- NewPage --> |
| <html lang="en"> |
| <head> |
| <!-- Generated by javadoc (1.8.0_402) on Mon Apr 15 02:01:53 UTC 2024 --> |
| <title>Utils (Spark 3.4.3 JavaDoc)</title> |
| <meta name="date" content="2024-04-15"> |
| <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> |
| <script type="text/javascript" src="../../../../script.js"></script> |
| </head> |
| <body> |
| <script type="text/javascript"><!-- |
| // Mirror this page's title onto the enclosing frameset document, unless |
| // the page was opened standalone (is-external=true in the URL). |
| // Accessing parent.document can throw across origins, so errors are |
| // deliberately swallowed. |
| try { |
|   if (location.href.indexOf('is-external=true') === -1) { |
|     parent.document.title="Utils (Spark 3.4.3 JavaDoc)"; |
|   } |
| } |
| catch(err) { |
| } |
| //--> |
| // Per-row metadata consumed by ../../../../script.js: each method-summary |
| // row id "iN" maps to bitmask 9 = 1 (static) | 8 (concrete) — see the |
| // tab definitions below. All 168 rows share the same mask, so build the |
| // map in a loop rather than spelling out every entry; the IIFE keeps the |
| // loop counter out of the global scope. |
| var methods = {}; |
| (function () { |
|   for (var i = 0; i < 168; i++) { methods["i" + i] = 9; } |
| })(); |
| var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]}; |
| var altColor = "altColor"; |
| var rowColor = "rowColor"; |
| var tableTab = "tableTab"; |
| var activeTableTab = "activeTableTab"; |
| </script> |
| <noscript> |
| <div>JavaScript is disabled in your browser.</div> |
| </noscript> |
| <!-- ========= START OF TOP NAVBAR ======= --> |
| <div class="topNav"><a name="navbar.top"> |
| <!-- --> |
| </a> |
| <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> |
| <a name="navbar.top.firstrow"> |
| <!-- --> |
| </a> |
| <ul class="navList" title="Navigation"> |
| <li><a href="../../../../overview-summary.html">Overview</a></li> |
| <li><a href="package-summary.html">Package</a></li> |
| <li class="navBarCell1Rev">Class</li> |
| <li><a href="package-tree.html">Tree</a></li> |
| <li><a href="../../../../deprecated-list.html">Deprecated</a></li> |
| <li><a href="../../../../index-all.html">Index</a></li> |
| <li><a href="../../../../help-doc.html">Help</a></li> |
| </ul> |
| </div> |
| <div class="subNav"> |
| <ul class="navList"> |
| <li><a href="../../../../org/apache/spark/util/ThreadUtils.html" title="class in org.apache.spark.util"><span class="typeNameLink">Prev Class</span></a></li> |
| <li><a href="../../../../org/apache/spark/util/VersionUtils.html" title="class in org.apache.spark.util"><span class="typeNameLink">Next Class</span></a></li> |
| </ul> |
| <ul class="navList"> |
| <li><a href="../../../../index.html?org/apache/spark/util/Utils.html" target="_top">Frames</a></li> |
| <li><a href="Utils.html" target="_top">No Frames</a></li> |
| </ul> |
| <ul class="navList" id="allclasses_navbar_top"> |
| <li><a href="../../../../allclasses-noframe.html">All Classes</a></li> |
| </ul> |
| <div> |
| <script type="text/javascript"><!-- |
| // Show the "All Classes" link only when this page is the top-level |
| // window, i.e. not rendered inside the javadoc frameset (where the |
| // frame list already provides it). allClassesLink stays an implicit |
| // global: the bottom-navbar script reuses the same name. |
| allClassesLink = document.getElementById("allclasses_navbar_top"); |
| allClassesLink.style.display = (window === top) ? "block" : "none"; |
| //--> |
| </script> |
| </div> |
| <div> |
| <ul class="subNavList"> |
| <li>Summary: </li> |
| <li>Nested | </li> |
| <li>Field | </li> |
| <li><a href="#constructor.summary">Constr</a> | </li> |
| <li><a href="#method.summary">Method</a></li> |
| </ul> |
| <ul class="subNavList"> |
| <li>Detail: </li> |
| <li>Field | </li> |
| <li><a href="#constructor.detail">Constr</a> | </li> |
| <li><a href="#method.detail">Method</a></li> |
| </ul> |
| </div> |
| <a name="skip.navbar.top"> |
| <!-- --> |
| </a></div> |
| <!-- ========= END OF TOP NAVBAR ========= --> |
| <!-- ======== START OF CLASS DATA ======== --> |
| <div class="header"> |
| <div class="subTitle">org.apache.spark.util</div> |
| <h2 title="Class Utils" class="title">Class Utils</h2> |
| </div> |
| <div class="contentContainer"> |
| <ul class="inheritance"> |
| <li>Object</li> |
| <li> |
| <ul class="inheritance"> |
| <li>org.apache.spark.util.Utils</li> |
| </ul> |
| </li> |
| </ul> |
| <div class="description"> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <hr> |
| <br> |
| <pre>public class <span class="typeNameLabel">Utils</span> |
| extends Object</pre> |
| <div class="block">Various utility methods used by Spark.</div> |
| </li> |
| </ul> |
| </div> |
| <div class="summary"> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <!-- ======== CONSTRUCTOR SUMMARY ======== --> |
| <ul class="blockList"> |
| <li class="blockList"><a name="constructor.summary"> |
| <!-- --> |
| </a> |
| <h3>Constructor Summary</h3> |
| <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation"> |
| <caption><span>Constructors</span><span class="tabEnd"> </span></caption> |
| <tr> |
| <th class="colOne" scope="col">Constructor and Description</th> |
| </tr> |
| <tr class="altColor"> |
| <td class="colOne"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#Utils--">Utils</a></span>()</code> </td> |
| </tr> |
| </table> |
| </li> |
| </ul> |
| <!-- ========== METHOD SUMMARY =========== --> |
| <ul class="blockList"> |
| <li class="blockList"><a name="method.summary"> |
| <!-- --> |
| </a> |
| <h3>Method Summary</h3> |
| <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation"> |
| <caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t1" class="tableTab"><span><a href="javascript:show(1);">Static Methods</a></span><span class="tabEnd"> </span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd"> </span></span></caption> |
| <tr> |
| <th class="colFirst" scope="col">Modifier and Type</th> |
| <th class="colLast" scope="col">Method and Description</th> |
| </tr> |
| <tr id="i0" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#BACKUP_STANDALONE_MASTER_PREFIX--">BACKUP_STANDALONE_MASTER_PREFIX</a></span>()</code> |
| <div class="block">An identifier that backup masters use in their responses.</div> |
| </td> |
| </tr> |
| <tr id="i1" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#buildLocationMetadata-scala.collection.Seq-int-">buildLocationMetadata</a></span>(scala.collection.Seq<org.apache.hadoop.fs.Path> paths, |
| int stopAppendingThreshold)</code> |
| <div class="block">Convert a sequence of <code>Path</code>s to a metadata string.</div> |
| </td> |
| </tr> |
| <tr id="i2" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#bytesToString-scala.math.BigInt-">bytesToString</a></span>(scala.math.BigInt size)</code> </td> |
| </tr> |
| <tr id="i3" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#bytesToString-long-">bytesToString</a></span>(long size)</code> |
| <div class="block">Convert a quantity in bytes to a human-readable string such as "4.0 MiB".</div> |
| </td> |
| </tr> |
| <tr id="i4" class="altColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#byteStringAsBytes-java.lang.String-">byteStringAsBytes</a></span>(String str)</code> |
| <div class="block">Convert a passed byte string (e.g.</div> |
| </td> |
| </tr> |
| <tr id="i5" class="rowColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#byteStringAsGb-java.lang.String-">byteStringAsGb</a></span>(String str)</code> |
| <div class="block">Convert a passed byte string (e.g.</div> |
| </td> |
| </tr> |
| <tr id="i6" class="altColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#byteStringAsKb-java.lang.String-">byteStringAsKb</a></span>(String str)</code> |
| <div class="block">Convert a passed byte string (e.g.</div> |
| </td> |
| </tr> |
| <tr id="i7" class="rowColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#byteStringAsMb-java.lang.String-">byteStringAsMb</a></span>(String str)</code> |
| <div class="block">Convert a passed byte string (e.g.</div> |
| </td> |
| </tr> |
| <tr id="i8" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#checkAndGetK8sMasterUrl-java.lang.String-">checkAndGetK8sMasterUrl</a></span>(String rawMasterURL)</code> |
| <div class="block">Check the validity of the given Kubernetes master URL and return the resolved URL.</div> |
| </td> |
| </tr> |
| <tr id="i9" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#checkHost-java.lang.String-">checkHost</a></span>(String host)</code> |
| <div class="block">Checks if the host contains only valid hostname/ip without port |
| NOTE: In case of an IPv6 IP, it should be enclosed inside []</div> |
| </td> |
| </tr> |
| <tr id="i10" class="altColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#checkHostPort-java.lang.String-">checkHostPort</a></span>(String hostPort)</code> </td> |
| </tr> |
| <tr id="i11" class="rowColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#checkOffHeapEnabled-org.apache.spark.SparkConf-long-">checkOffHeapEnabled</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> sparkConf, |
| long offHeapSize)</code> |
| <div class="block">return 0 if MEMORY_OFFHEAP_ENABLED is false.</div> |
| </td> |
| </tr> |
| <tr id="i12" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#chmod700-java.io.File-">chmod700</a></span>(java.io.File file)</code> |
| <div class="block">JDK equivalent of <code>chmod 700 file</code>.</div> |
| </td> |
| </tr> |
| <tr id="i13" class="rowColor"> |
| <td class="colFirst"><code>static <C> Class<C></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#classForName-java.lang.String-boolean-boolean-">classForName</a></span>(String className, |
| boolean initialize, |
| boolean noSparkClassLoader)</code> |
| <div class="block">Preferred alternative to Class.forName(className), as well as |
| Class.forName(className, initialize, loader) with current thread's ContextClassLoader.</div> |
| </td> |
| </tr> |
| <tr id="i14" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#classIsLoadable-java.lang.String-">classIsLoadable</a></span>(String clazz)</code> |
| <div class="block">Determines whether the provided class is loadable in the current thread.</div> |
| </td> |
| </tr> |
| <tr id="i15" class="rowColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#clone-T-org.apache.spark.serializer.SerializerInstance-scala.reflect.ClassTag-">clone</a></span>(T value, |
| <a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a> serializer, |
| scala.reflect.ClassTag<T> evidence$2)</code> |
| <div class="block">Clone an object using a Spark serializer.</div> |
| </td> |
| </tr> |
| <tr id="i16" class="altColor"> |
| <td class="colFirst"><code>static java.util.Properties</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#cloneProperties-java.util.Properties-">cloneProperties</a></span>(java.util.Properties props)</code> |
| <div class="block">Create a new properties object with the same values as `props`</div> |
| </td> |
| </tr> |
| <tr id="i17" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#copyFileStreamNIO-java.nio.channels.FileChannel-java.nio.channels.WritableByteChannel-long-long-">copyFileStreamNIO</a></span>(java.nio.channels.FileChannel input, |
| java.nio.channels.WritableByteChannel output, |
| long startPosition, |
| long bytesToCopy)</code> </td> |
| </tr> |
| <tr id="i18" class="altColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#copyStream-java.io.InputStream-java.io.OutputStream-boolean-boolean-">copyStream</a></span>(java.io.InputStream in, |
| java.io.OutputStream out, |
| boolean closeStreams, |
| boolean transferToEnabled)</code> |
| <div class="block">Copy all data from an InputStream to an OutputStream.</div> |
| </td> |
| </tr> |
| <tr id="i19" class="rowColor"> |
| <td class="colFirst"><code>static java.io.InputStream</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#copyStreamUpTo-java.io.InputStream-long-">copyStreamUpTo</a></span>(java.io.InputStream in, |
| long maxSize)</code> |
| <div class="block">Copy the first <code>maxSize</code> bytes of data from the InputStream to an in-memory |
| buffer, primarily to check for corruption.</div> |
| </td> |
| </tr> |
| <tr id="i20" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createDirectory-java.io.File-">createDirectory</a></span>(java.io.File dir)</code> |
| <div class="block">Create a directory given the abstract pathname</div> |
| </td> |
| </tr> |
| <tr id="i21" class="rowColor"> |
| <td class="colFirst"><code>static java.io.File</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createDirectory-java.lang.String-java.lang.String-">createDirectory</a></span>(String root, |
| String namePrefix)</code> |
| <div class="block">Create a directory inside the given parent directory.</div> |
| </td> |
| </tr> |
| <tr id="i22" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createFailedToGetTokenMessage-java.lang.String-java.lang.Throwable-">createFailedToGetTokenMessage</a></span>(String serviceName, |
| Throwable e)</code> |
| <div class="block">Returns a string message about delegation token generation failure</div> |
| </td> |
| </tr> |
| <tr id="i23" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createSecret-org.apache.spark.SparkConf-">createSecret</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> </td> |
| </tr> |
| <tr id="i24" class="altColor"> |
| <td class="colFirst"><code>static java.io.File</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createTempDir--">createTempDir</a></span>()</code> |
| <div class="block">Create a temporary directory inside the <code>java.io.tmpdir</code> prefixed with <code>spark</code>.</div> |
| </td> |
| </tr> |
| <tr id="i25" class="rowColor"> |
| <td class="colFirst"><code>static java.io.File</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#createTempDir-java.lang.String-java.lang.String-">createTempDir</a></span>(String root, |
| String namePrefix)</code> |
| <div class="block">Create a temporary directory inside the given parent directory.</div> |
| </td> |
| </tr> |
| <tr id="i26" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#decodeFileNameInURI-java.net.URI-">decodeFileNameInURI</a></span>(java.net.URI uri)</code> |
| <div class="block">Get the file name from uri's raw path and decode it.</div> |
| </td> |
| </tr> |
| <tr id="i27" class="rowColor"> |
| <td class="colFirst"><code>static int</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#DEFAULT_DRIVER_MEM_MB--">DEFAULT_DRIVER_MEM_MB</a></span>()</code> |
| <div class="block">Define a default value for driver memory here since this value is referenced across the code |
| base and nearly all files already use Utils.scala</div> |
| </td> |
| </tr> |
| <tr id="i28" class="altColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#deleteRecursively-java.io.File-">deleteRecursively</a></span>(java.io.File file)</code> |
| <div class="block">Delete a file or directory and its contents recursively.</div> |
| </td> |
| </tr> |
| <tr id="i29" class="rowColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#deserialize-byte:A-">deserialize</a></span>(byte[] bytes)</code> |
| <div class="block">Deserialize an object using Java serialization</div> |
| </td> |
| </tr> |
| <tr id="i30" class="altColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#deserialize-byte:A-java.lang.ClassLoader-">deserialize</a></span>(byte[] bytes, |
| ClassLoader loader)</code> |
| <div class="block">Deserialize an object using Java serialization and the given ClassLoader</div> |
| </td> |
| </tr> |
| <tr id="i31" class="rowColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#deserializeLongValue-byte:A-">deserializeLongValue</a></span>(byte[] bytes)</code> |
| <div class="block">Deserialize a Long value (used for <code>org.apache.spark.api.python.PythonPartitioner</code>)</div> |
| </td> |
| </tr> |
| <tr id="i32" class="altColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#deserializeViaNestedStream-java.io.InputStream-org.apache.spark.serializer.SerializerInstance-scala.Function1-">deserializeViaNestedStream</a></span>(java.io.InputStream is, |
| <a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a> ser, |
| scala.Function1<<a href="../../../../org/apache/spark/serializer/DeserializationStream.html" title="class in org.apache.spark.serializer">DeserializationStream</a>,scala.runtime.BoxedUnit> f)</code> |
| <div class="block">Deserialize via nested stream using specific serializer</div> |
| </td> |
| </tr> |
| <tr id="i33" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#doesDirectoryContainAnyNewFiles-java.io.File-long-">doesDirectoryContainAnyNewFiles</a></span>(java.io.File dir, |
| long cutoff)</code> |
| <div class="block">Determines if a directory contains any files newer than cutoff seconds.</div> |
| </td> |
| </tr> |
| <tr id="i34" class="altColor"> |
| <td class="colFirst"><code>static java.io.File</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#doFetchFile-java.lang.String-java.io.File-java.lang.String-org.apache.spark.SparkConf-org.apache.hadoop.conf.Configuration-">doFetchFile</a></span>(String url, |
| java.io.File targetDir, |
| String filename, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| org.apache.hadoop.conf.Configuration hadoopConf)</code> |
| <div class="block">Download a file or directory to target directory.</div> |
| </td> |
| </tr> |
| <tr id="i35" class="rowColor"> |
| <td class="colFirst"><code>static scala.collection.immutable.Set<String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#EMPTY_USER_GROUPS--">EMPTY_USER_GROUPS</a></span>()</code> </td> |
| </tr> |
| <tr id="i36" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#encodeFileNameToURIRawPath-java.lang.String-">encodeFileNameToURIRawPath</a></span>(String fileName)</code> |
| <div class="block">A file name may contain some invalid URI characters, such as " ".</div> |
| </td> |
| </tr> |
| <tr id="i37" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#exceptionString-java.lang.Throwable-">exceptionString</a></span>(Throwable e)</code> |
| <div class="block">Return a nice string representation of the exception.</div> |
| </td> |
| </tr> |
| <tr id="i38" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#executeAndGetOutput-scala.collection.Seq-java.io.File-scala.collection.Map-boolean-">executeAndGetOutput</a></span>(scala.collection.Seq<String> command, |
| java.io.File workingDir, |
| scala.collection.Map<String,String> extraEnvironment, |
| boolean redirectStderr)</code> |
| <div class="block">Execute a command and get its output, throwing an exception if it yields a code other than 0.</div> |
| </td> |
| </tr> |
| <tr id="i39" class="rowColor"> |
| <td class="colFirst"><code>static Process</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#executeCommand-scala.collection.Seq-java.io.File-scala.collection.Map-boolean-">executeCommand</a></span>(scala.collection.Seq<String> command, |
| java.io.File workingDir, |
| scala.collection.Map<String,String> extraEnvironment, |
| boolean redirectStderr)</code> |
| <div class="block">Execute a command and return the process running the command.</div> |
| </td> |
| </tr> |
| <tr id="i40" class="altColor"> |
| <td class="colFirst"><code>static int</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#executorOffHeapMemorySizeAsMb-org.apache.spark.SparkConf-">executorOffHeapMemorySizeAsMb</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> sparkConf)</code> |
| <div class="block">Convert MEMORY_OFFHEAP_SIZE to MB Unit, return 0 if MEMORY_OFFHEAP_ENABLED is false.</div> |
| </td> |
| </tr> |
| <tr id="i41" class="rowColor"> |
| <td class="colFirst"><code>static scala.Tuple2<String,Object></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#extractHostPortFromSparkUrl-java.lang.String-">extractHostPortFromSparkUrl</a></span>(String sparkUrl)</code> |
| <div class="block">Return a pair of host and port extracted from the <code>sparkUrl</code>.</div> |
| </td> |
| </tr> |
| <tr id="i42" class="altColor"> |
| <td class="colFirst"><code>static java.io.File</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#fetchFile-java.lang.String-java.io.File-org.apache.spark.SparkConf-org.apache.hadoop.conf.Configuration-long-boolean-boolean-">fetchFile</a></span>(String url, |
| java.io.File targetDir, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| org.apache.hadoop.conf.Configuration hadoopConf, |
| long timestamp, |
| boolean useCache, |
| boolean shouldUntar)</code> |
| <div class="block">Download a file or directory to target directory.</div> |
| </td> |
| </tr> |
| <tr id="i43" class="rowColor"> |
| <td class="colFirst"><code>static org.apache.spark.util.CallSite</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getCallSite-scala.Function1-">getCallSite</a></span>(scala.Function1<String,Object> skipClass)</code> |
| <div class="block">When called inside a class in the spark package, returns the name of the user code class |
| (outside the spark package) that called into Spark, as well as which Spark method they called.</div> |
| </td> |
| </tr> |
| <tr id="i44" class="altColor"> |
| <td class="colFirst"><code>static String[]</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getConfiguredLocalDirs-org.apache.spark.SparkConf-">getConfiguredLocalDirs</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> |
| <div class="block">Return the configured local directories where Spark can write files.</div> |
| </td> |
| </tr> |
| <tr id="i45" class="rowColor"> |
| <td class="colFirst"><code>static ClassLoader</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getContextOrSparkClassLoader--">getContextOrSparkClassLoader</a></span>()</code> |
| <div class="block">Get the Context ClassLoader on this thread or, if not present, the ClassLoader that |
| loaded Spark.</div> |
| </td> |
| </tr> |
| <tr id="i46" class="altColor"> |
| <td class="colFirst"><code>static scala.collection.immutable.Set<String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getCurrentUserGroups-org.apache.spark.SparkConf-java.lang.String-">getCurrentUserGroups</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> sparkConf, |
| String username)</code> </td> |
| </tr> |
| <tr id="i47" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getCurrentUserName--">getCurrentUserName</a></span>()</code> |
| <div class="block">Returns the current user name.</div> |
| </td> |
| </tr> |
| <tr id="i48" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getDefaultPropertiesFile-scala.collection.Map-">getDefaultPropertiesFile</a></span>(scala.collection.Map<String,String> env)</code> |
| <div class="block">Return the path of the default Spark properties file.</div> |
| </td> |
| </tr> |
| <tr id="i49" class="rowColor"> |
| <td class="colFirst"><code>static int</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getDynamicAllocationInitialExecutors-org.apache.spark.SparkConf-">getDynamicAllocationInitialExecutors</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> |
| <div class="block">Return the initial number of executors for dynamic allocation.</div> |
| </td> |
| </tr> |
| <tr id="i50" class="altColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getFileLength-java.io.File-org.apache.spark.SparkConf-">getFileLength</a></span>(java.io.File file, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> workConf)</code> |
| <div class="block">Return the file length, if the file is compressed it returns the uncompressed file length.</div> |
| </td> |
| </tr> |
| <tr id="i51" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getFormattedClassName-java.lang.Object-">getFormattedClassName</a></span>(Object obj)</code> |
| <div class="block">Return the class name of the given object, removing all dollar signs</div> |
| </td> |
| </tr> |
| <tr id="i52" class="altColor"> |
| <td class="colFirst"><code>static org.apache.hadoop.fs.FileSystem</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getHadoopFileSystem-java.lang.String-org.apache.hadoop.conf.Configuration-">getHadoopFileSystem</a></span>(String path, |
| org.apache.hadoop.conf.Configuration conf)</code> |
| <div class="block">Return a Hadoop FileSystem with the scheme encoded in the given path.</div> |
| </td> |
| </tr> |
| <tr id="i53" class="rowColor"> |
| <td class="colFirst"><code>static org.apache.hadoop.fs.FileSystem</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getHadoopFileSystem-java.net.URI-org.apache.hadoop.conf.Configuration-">getHadoopFileSystem</a></span>(java.net.URI path, |
| org.apache.hadoop.conf.Configuration conf)</code> |
| <div class="block">Return a Hadoop FileSystem with the scheme encoded in the given path.</div> |
| </td> |
| </tr> |
| <tr id="i54" class="altColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getIteratorSize-scala.collection.Iterator-">getIteratorSize</a></span>(scala.collection.Iterator<?> iterator)</code> |
| <div class="block">Counts the number of elements of an iterator.</div> |
| </td> |
| </tr> |
| <tr id="i55" class="rowColor"> |
| <td class="colFirst"><code>static <T> scala.collection.Iterator<scala.Tuple2<T,Object>></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getIteratorZipWithIndex-scala.collection.Iterator-long-">getIteratorZipWithIndex</a></span>(scala.collection.Iterator<T> iter, |
| long startIndex)</code> |
| <div class="block">Generate a zipWithIndex iterator, avoid index value overflowing problem |
| in scala's zipWithIndex</div> |
| </td> |
| </tr> |
| <tr id="i56" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getLocalDir-org.apache.spark.SparkConf-">getLocalDir</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> |
| <div class="block">Get the path of a temporary directory.</div> |
| </td> |
| </tr> |
| <tr id="i57" class="rowColor"> |
| <td class="colFirst"><code>static scala.collection.Seq<String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getLocalUserJarsForShell-org.apache.spark.SparkConf-">getLocalUserJarsForShell</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> |
| <div class="block">Return the local jar files which will be added to REPL's classpath.</div> |
| </td> |
| </tr> |
| <tr id="i58" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getProcessName--">getProcessName</a></span>()</code> |
| <div class="block">Returns the name of this JVM process.</div> |
| </td> |
| </tr> |
| <tr id="i59" class="rowColor"> |
| <td class="colFirst"><code>static scala.collection.Map<String,String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getPropertiesFromFile-java.lang.String-">getPropertiesFromFile</a></span>(String filename)</code> |
| <div class="block">Load properties present in the given file.</div> |
| </td> |
| </tr> |
| <tr id="i60" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getSimpleName-java.lang.Class-">getSimpleName</a></span>(Class<?> cls)</code> |
| <div class="block">Safer than Class obj's getSimpleName which may throw Malformed class name error in scala.</div> |
| </td> |
| </tr> |
| <tr id="i61" class="rowColor"> |
| <td class="colFirst"><code>static ClassLoader</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getSparkClassLoader--">getSparkClassLoader</a></span>()</code> |
| <div class="block">Get the ClassLoader which loaded Spark.</div> |
| </td> |
| </tr> |
| <tr id="i62" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getSparkOrYarnConfig-org.apache.spark.SparkConf-java.lang.String-java.lang.String-">getSparkOrYarnConfig</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| String key, |
| String default_)</code> |
| <div class="block">Return the value of a config either through the SparkConf or the Hadoop configuration.</div> |
| </td> |
| </tr> |
| <tr id="i63" class="rowColor"> |
| <td class="colFirst"><code>static scala.Option<String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getStderr-java.lang.Process-long-">getStderr</a></span>(Process process, |
| long timeoutMs)</code> |
| <div class="block">Return the stderr of a process after waiting for the process to terminate.</div> |
| </td> |
| </tr> |
| <tr id="i64" class="altColor"> |
| <td class="colFirst"><code>static scala.collection.Map<String,String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getSystemProperties--">getSystemProperties</a></span>()</code> |
| <div class="block">Returns the system properties map that is thread-safe to iterate over.</div> |
| </td> |
| </tr> |
| <tr id="i65" class="rowColor"> |
| <td class="colFirst"><code>static <a href="../../../../org/apache/spark/status/api/v1/ThreadStackTrace.html" title="class in org.apache.spark.status.api.v1">ThreadStackTrace</a>[]</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getThreadDump--">getThreadDump</a></span>()</code> |
| <div class="block">Return a thread dump of all threads' stacktraces.</div> |
| </td> |
| </tr> |
| <tr id="i66" class="altColor"> |
| <td class="colFirst"><code>static scala.Option<<a href="../../../../org/apache/spark/status/api/v1/ThreadStackTrace.html" title="class in org.apache.spark.status.api.v1">ThreadStackTrace</a>></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getThreadDumpForThread-long-">getThreadDumpForThread</a></span>(long threadId)</code> </td> |
| </tr> |
| <tr id="i67" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getUsedTimeNs-long-">getUsedTimeNs</a></span>(long startTimeNs)</code> |
| <div class="block">Return a string telling how much time has passed, in milliseconds.</div> |
| </td> |
| </tr> |
| <tr id="i68" class="altColor"> |
| <td class="colFirst"><code>static scala.collection.Seq<String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#getUserJars-org.apache.spark.SparkConf-">getUserJars</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> |
| <div class="block">Return the jar files pointed by the "spark.jars" property.</div> |
| </td> |
| </tr> |
| <tr id="i69" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#initDaemon-org.slf4j.Logger-">initDaemon</a></span>(org.slf4j.Logger log)</code> |
| <div class="block">Utility function that should be called early in <code>main()</code> for daemons to set up some common |
| diagnostic state.</div> |
| </td> |
| </tr> |
| <tr id="i70" class="altColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#instantiateSerializerFromConf-org.apache.spark.internal.config.ConfigEntry-org.apache.spark.SparkConf-boolean-">instantiateSerializerFromConf</a></span>(org.apache.spark.internal.config.ConfigEntry<String> propertyName, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| boolean isDriver)</code> </td> |
| </tr> |
| <tr id="i71" class="rowColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#instantiateSerializerOrShuffleManager-java.lang.String-org.apache.spark.SparkConf-boolean-">instantiateSerializerOrShuffleManager</a></span>(String className, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| boolean isDriver)</code> </td> |
| </tr> |
| <tr id="i72" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isAbsoluteURI-java.lang.String-">isAbsoluteURI</a></span>(String path)</code> |
| <div class="block">Check whether a path is an absolute URI.</div> |
| </td> |
| </tr> |
| <tr id="i73" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isBindCollision-java.lang.Throwable-">isBindCollision</a></span>(Throwable exception)</code> |
| <div class="block">Return whether the exception is caused by an address-port collision when binding.</div> |
| </td> |
| </tr> |
| <tr id="i74" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isClientMode-org.apache.spark.SparkConf-">isClientMode</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> </td> |
| </tr> |
| <tr id="i75" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isDynamicAllocationEnabled-org.apache.spark.SparkConf-">isDynamicAllocationEnabled</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> |
| <div class="block">Return whether dynamic allocation is enabled in the given conf.</div> |
| </td> |
| </tr> |
| <tr id="i76" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isFatalError-java.lang.Throwable-">isFatalError</a></span>(Throwable e)</code> |
| <div class="block">Returns true if the given exception was fatal.</div> |
| </td> |
| </tr> |
| <tr id="i77" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isFileSplittable-org.apache.hadoop.fs.Path-org.apache.hadoop.io.compress.CompressionCodecFactory-">isFileSplittable</a></span>(org.apache.hadoop.fs.Path path, |
| org.apache.hadoop.io.compress.CompressionCodecFactory codecFactory)</code> |
| <div class="block">Check whether the file of the path is splittable.</div> |
| </td> |
| </tr> |
| <tr id="i78" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isG1GC--">isG1GC</a></span>()</code> </td> |
| </tr> |
| <tr id="i79" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isInDirectory-java.io.File-java.io.File-">isInDirectory</a></span>(java.io.File parent, |
| java.io.File child)</code> |
| <div class="block">Return whether the specified file is a parent directory of the child file.</div> |
| </td> |
| </tr> |
| <tr id="i80" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isInRunningSparkTask--">isInRunningSparkTask</a></span>()</code> |
| <div class="block">Returns whether the current code is running in a Spark task, e.g., in executors.</div> |
| </td> |
| </tr> |
| <tr id="i81" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isLocalMaster-org.apache.spark.SparkConf-">isLocalMaster</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> </td> |
| </tr> |
| <tr id="i82" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isLocalUri-java.lang.String-">isLocalUri</a></span>(String uri)</code> |
| <div class="block">Returns whether the URI is a "local:" URI.</div> |
| </td> |
| </tr> |
| <tr id="i83" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isMac--">isMac</a></span>()</code> |
| <div class="block">Whether the underlying operating system is Mac OS X.</div> |
| </td> |
| </tr> |
| <tr id="i84" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isMacOnAppleSilicon--">isMacOnAppleSilicon</a></span>()</code> |
| <div class="block">Whether the underlying operating system is Mac OS X and processor is Apple Silicon.</div> |
| </td> |
| </tr> |
| <tr id="i85" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isMemberClass-java.lang.Class-">isMemberClass</a></span>(Class<?> cls)</code> |
| <div class="block">Returns true if and only if the underlying class is a member class.</div> |
| </td> |
| </tr> |
| <tr id="i86" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isPushBasedShuffleEnabled-org.apache.spark.SparkConf-boolean-boolean-">isPushBasedShuffleEnabled</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| boolean isDriver, |
| boolean checkSerializer)</code> |
| <div class="block">Push based shuffle can only be enabled when below conditions are met: |
| - the application is submitted to run in YARN mode |
| - external shuffle service enabled |
| - IO encryption disabled |
| - serializer(such as KryoSerializer) supports relocation of serialized objects</div> |
| </td> |
| </tr> |
| <tr id="i87" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isStreamingDynamicAllocationEnabled-org.apache.spark.SparkConf-">isStreamingDynamicAllocationEnabled</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> </td> |
| </tr> |
| <tr id="i88" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isTesting--">isTesting</a></span>()</code> |
| <div class="block">Indicates whether Spark is currently running unit tests.</div> |
| </td> |
| </tr> |
| <tr id="i89" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#isWindows--">isWindows</a></span>()</code> |
| <div class="block">Whether the underlying operating system is Windows.</div> |
| </td> |
| </tr> |
| <tr id="i90" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#libraryPathEnvName--">libraryPathEnvName</a></span>()</code> |
| <div class="block">Return the current system LD_LIBRARY_PATH name</div> |
| </td> |
| </tr> |
| <tr id="i91" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#libraryPathEnvPrefix-scala.collection.Seq-">libraryPathEnvPrefix</a></span>(scala.collection.Seq<String> libraryPaths)</code> |
| <div class="block">Return the prefix of a command that appends the given library paths to the |
| system-specific library path environment variable.</div> |
| </td> |
| </tr> |
| <tr id="i92" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#loadDefaultSparkProperties-org.apache.spark.SparkConf-java.lang.String-">loadDefaultSparkProperties</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| String filePath)</code> |
| <div class="block">Load default Spark properties from the given file.</div> |
| </td> |
| </tr> |
| <tr id="i93" class="rowColor"> |
| <td class="colFirst"><code>static <T> scala.collection.Seq<T></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#loadExtensions-java.lang.Class-scala.collection.Seq-org.apache.spark.SparkConf-">loadExtensions</a></span>(Class<T> extClass, |
| scala.collection.Seq<String> classes, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> |
| <div class="block">Create instances of extension classes.</div> |
| </td> |
| </tr> |
| <tr id="i94" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#LOCAL_SCHEME--">LOCAL_SCHEME</a></span>()</code> |
| <div class="block">Scheme used for files that are locally available on worker nodes in the cluster.</div> |
| </td> |
| </tr> |
| <tr id="i95" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#localCanonicalHostName--">localCanonicalHostName</a></span>()</code> |
| <div class="block">Get the local machine's FQDN.</div> |
| </td> |
| </tr> |
| <tr id="i96" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#localHostName--">localHostName</a></span>()</code> |
| <div class="block">Get the local machine's hostname.</div> |
| </td> |
| </tr> |
| <tr id="i97" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#localHostNameForURI--">localHostNameForURI</a></span>()</code> |
| <div class="block">Get the local machine's URI.</div> |
| </td> |
| </tr> |
| <tr id="i98" class="altColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#logUncaughtExceptions-scala.Function0-">logUncaughtExceptions</a></span>(scala.Function0<T> f)</code> |
| <div class="block">Execute the given block, logging and re-throwing any uncaught exception.</div> |
| </td> |
| </tr> |
| <tr id="i99" class="rowColor"> |
| <td class="colFirst"><code>static int</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#MAX_DIR_CREATION_ATTEMPTS--">MAX_DIR_CREATION_ATTEMPTS</a></span>()</code> </td> |
| </tr> |
| <tr id="i100" class="altColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#median-long:A-boolean-">median</a></span>(long[] sizes, |
| boolean alreadySorted)</code> |
| <div class="block">Return the median number of a long array</div> |
| </td> |
| </tr> |
| <tr id="i101" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#megabytesToString-long-">megabytesToString</a></span>(long megabytes)</code> |
| <div class="block">Convert a quantity in megabytes to a human-readable string such as "4.0 MiB".</div> |
| </td> |
| </tr> |
| <tr id="i102" class="altColor"> |
| <td class="colFirst"><code>static int</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#memoryStringToMb-java.lang.String-">memoryStringToMb</a></span>(String str)</code> |
| <div class="block">Convert a Java memory parameter passed to -Xmx (such as 300m or 1g) to a number of mebibytes.</div> |
| </td> |
| </tr> |
| <tr id="i103" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#msDurationToString-long-">msDurationToString</a></span>(long ms)</code> |
| <div class="block">Returns a human-readable string representing a duration such as "35ms"</div> |
| </td> |
| </tr> |
| <tr id="i104" class="altColor"> |
| <td class="colFirst"><code>static String[]</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#nonLocalPaths-java.lang.String-boolean-">nonLocalPaths</a></span>(String paths, |
| boolean testWindows)</code> |
| <div class="block">Return all non-local paths from a comma-separated list of paths.</div> |
| </td> |
| </tr> |
| <tr id="i105" class="rowColor"> |
| <td class="colFirst"><code>static int</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#nonNegativeHash-java.lang.Object-">nonNegativeHash</a></span>(Object obj)</code> </td> |
| </tr> |
| <tr id="i106" class="altColor"> |
| <td class="colFirst"><code>static int</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#nonNegativeMod-int-int-">nonNegativeMod</a></span>(int x, |
| int mod)</code> </td> |
| </tr> |
| <tr id="i107" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#offsetBytes-scala.collection.Seq-scala.collection.Seq-long-long-">offsetBytes</a></span>(scala.collection.Seq<java.io.File> files, |
| scala.collection.Seq<Object> fileLengths, |
| long start, |
| long end)</code> |
| <div class="block">Return a string containing data across a set of files.</div> |
| </td> |
| </tr> |
| <tr id="i108" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#offsetBytes-java.lang.String-long-long-long-">offsetBytes</a></span>(String path, |
| long length, |
| long start, |
| long end)</code> |
| <div class="block">Return a string containing part of a file from byte 'start' to 'end'.</div> |
| </td> |
| </tr> |
| <tr id="i109" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#org:Dapache:Dspark:Dinternal:DLogging:D:Dlog__:Deq-org.slf4j.Logger-">org$apache$spark$internal$Logging$$log__$eq</a></span>(org.slf4j.Logger x$1)</code> </td> |
| </tr> |
| <tr id="i110" class="altColor"> |
| <td class="colFirst"><code>static org.slf4j.Logger</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#org:Dapache:Dspark:Dinternal:DLogging:D:Dlog_--">org$apache$spark$internal$Logging$$log_</a></span>()</code> </td> |
| </tr> |
| <tr id="i111" class="rowColor"> |
| <td class="colFirst"><code>static scala.Tuple2<String,Object></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#parseHostPort-java.lang.String-">parseHostPort</a></span>(String hostPort)</code> </td> |
| </tr> |
| <tr id="i112" class="altColor"> |
| <td class="colFirst"><code>static String[]</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#parseStandaloneMasterUrls-java.lang.String-">parseStandaloneMasterUrls</a></span>(String masterUrls)</code> |
| <div class="block">Split the comma delimited string of master URLs into a list.</div> |
| </td> |
| </tr> |
| <tr id="i113" class="rowColor"> |
| <td class="colFirst"><code>static int</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#portMaxRetries-org.apache.spark.SparkConf-">portMaxRetries</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</code> |
| <div class="block">Maximum number of retries when binding to a port before giving up.</div> |
| </td> |
| </tr> |
| <tr id="i114" class="altColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#preferIPv6--">preferIPv6</a></span>()</code> |
| <div class="block">Whether the underlying JVM prefers IPv6 addresses.</div> |
| </td> |
| </tr> |
| <tr id="i115" class="rowColor"> |
| <td class="colFirst"><code>static Thread</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#processStreamByLine-java.lang.String-java.io.InputStream-scala.Function1-">processStreamByLine</a></span>(String threadName, |
| java.io.InputStream inputStream, |
| scala.Function1<String,scala.runtime.BoxedUnit> processLine)</code> |
| <div class="block">Return and start a daemon thread that processes the content of the input stream line by line.</div> |
| </td> |
| </tr> |
| <tr id="i116" class="altColor"> |
| <td class="colFirst"><code>static java.util.Random</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#random--">random</a></span>()</code> </td> |
| </tr> |
| <tr id="i117" class="rowColor"> |
| <td class="colFirst"><code>static <T> scala.collection.Seq<T></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#randomize-scala.collection.TraversableOnce-scala.reflect.ClassTag-">randomize</a></span>(scala.collection.TraversableOnce<T> seq, |
| scala.reflect.ClassTag<T> evidence$1)</code> |
| <div class="block">Shuffle the elements of a collection into a random order, returning the |
| result in a new collection.</div> |
| </td> |
| </tr> |
| <tr id="i118" class="altColor"> |
| <td class="colFirst"><code>static <T> Object</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#randomizeInPlace-java.lang.Object-java.util.Random-">randomizeInPlace</a></span>(Object arr, |
| java.util.Random rand)</code> |
| <div class="block">Shuffle the elements of an array into a random order, modifying the |
| original array.</div> |
| </td> |
| </tr> |
| <tr id="i119" class="rowColor"> |
| <td class="colFirst"><code>static java.io.File[]</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#recursiveList-java.io.File-">recursiveList</a></span>(java.io.File f)</code> |
| <div class="block">Lists files recursively.</div> |
| </td> |
| </tr> |
| <tr id="i120" class="altColor"> |
| <td class="colFirst"><code>static scala.collection.Seq<scala.Tuple2<String,String>></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#redact-scala.collection.Map-">redact</a></span>(scala.collection.Map<String,String> kvs)</code> |
| <div class="block">Looks up the redaction regex from within the key value pairs and uses it to redact the rest |
| of the key value pairs.</div> |
| </td> |
| </tr> |
| <tr id="i121" class="rowColor"> |
| <td class="colFirst"><code>static <K,V> scala.collection.Seq<scala.Tuple2<K,V>></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#redact-scala.Option-scala.collection.Seq-">redact</a></span>(scala.Option<scala.util.matching.Regex> regex, |
| scala.collection.Seq<scala.Tuple2<K,V>> kvs)</code> |
| <div class="block">Redact the sensitive values in the given map.</div> |
| </td> |
| </tr> |
| <tr id="i122" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#redact-scala.Option-java.lang.String-">redact</a></span>(scala.Option<scala.util.matching.Regex> regex, |
| String text)</code> |
| <div class="block">Redact the sensitive information in the given string.</div> |
| </td> |
| </tr> |
| <tr id="i123" class="rowColor"> |
| <td class="colFirst"><code>static scala.collection.Seq<scala.Tuple2<String,String>></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#redact-org.apache.spark.SparkConf-scala.collection.Seq-">redact</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| scala.collection.Seq<scala.Tuple2<String,String>> kvs)</code> |
| <div class="block">Redact the sensitive values in the given map.</div> |
| </td> |
| </tr> |
| <tr id="i124" class="altColor"> |
| <td class="colFirst"><code>static scala.collection.Seq<String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#redactCommandLineArgs-org.apache.spark.SparkConf-scala.collection.Seq-">redactCommandLineArgs</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| scala.collection.Seq<String> commands)</code> </td> |
| </tr> |
| <tr id="i125" class="rowColor"> |
| <td class="colFirst"><code>static java.net.URI</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#resolveURI-java.lang.String-">resolveURI</a></span>(String path)</code> |
| <div class="block">Return a well-formed URI for the file described by a user input string.</div> |
| </td> |
| </tr> |
| <tr id="i126" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#resolveURIs-java.lang.String-">resolveURIs</a></span>(String paths)</code> |
| <div class="block">Resolve a comma-separated list of paths.</div> |
| </td> |
| </tr> |
| <tr id="i127" class="rowColor"> |
| <td class="colFirst"><code>static boolean</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#responseFromBackup-java.lang.String-">responseFromBackup</a></span>(String msg)</code> |
| <div class="block">Return true if the response message is sent from a backup Master on standby.</div> |
| </td> |
| </tr> |
| <tr id="i128" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#sanitizeDirName-java.lang.String-">sanitizeDirName</a></span>(String str)</code> </td> |
| </tr> |
| <tr id="i129" class="rowColor"> |
| <td class="colFirst"><code>static <T> byte[]</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#serialize-T-">serialize</a></span>(T o)</code> |
| <div class="block">Serialize an object using Java serialization</div> |
| </td> |
| </tr> |
| <tr id="i130" class="altColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#serializeViaNestedStream-java.io.OutputStream-org.apache.spark.serializer.SerializerInstance-scala.Function1-">serializeViaNestedStream</a></span>(java.io.OutputStream os, |
| <a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a> ser, |
| scala.Function1<<a href="../../../../org/apache/spark/serializer/SerializationStream.html" title="class in org.apache.spark.serializer">SerializationStream</a>,scala.runtime.BoxedUnit> f)</code> |
| <div class="block">Serialize via nested stream using specific serializer</div> |
| </td> |
| </tr> |
| <tr id="i131" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#setCustomHostname-java.lang.String-">setCustomHostname</a></span>(String hostname)</code> |
| <div class="block">Allow setting a custom host name because when we run on Mesos we need to use the same |
| hostname it reports to the master.</div> |
| </td> |
| </tr> |
| <tr id="i132" class="altColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#setLogLevel-org.apache.logging.log4j.Level-">setLogLevel</a></span>(org.apache.logging.log4j.Level l)</code> |
<div class="block">Configure a new log4j level.</div> |
| </td> |
| </tr> |
| <tr id="i133" class="rowColor"> |
| <td class="colFirst"><code>static scala.collection.Seq<String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#sparkJavaOpts-org.apache.spark.SparkConf-scala.Function1-">sparkJavaOpts</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| scala.Function1<String,Object> filterKey)</code> |
| <div class="block">Convert all spark properties set in the given SparkConf to a sequence of java options.</div> |
| </td> |
| </tr> |
| <tr id="i134" class="altColor"> |
| <td class="colFirst"><code>static scala.collection.Seq<String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#splitCommandString-java.lang.String-">splitCommandString</a></span>(String s)</code> |
| <div class="block">Split a string of potentially quoted arguments from the command line the way that a shell |
| would do it to determine arguments to a command.</div> |
| </td> |
| </tr> |
| <tr id="i135" class="rowColor"> |
| <td class="colFirst"><code>static <T> scala.Tuple2<T,Object></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#startServiceOnPort-int-scala.Function1-org.apache.spark.SparkConf-java.lang.String-">startServiceOnPort</a></span>(int startPort, |
| scala.Function1<Object,scala.Tuple2<T,Object>> startService, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| String serviceName)</code> |
| <div class="block">Attempt to start a service on the given port, or fail after a number of attempts.</div> |
| </td> |
| </tr> |
| <tr id="i136" class="altColor"> |
| <td class="colFirst"><code>static int</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#stringHalfWidth-java.lang.String-">stringHalfWidth</a></span>(String str)</code> |
| <div class="block">Return the number of half widths in a given string.</div> |
| </td> |
| </tr> |
| <tr id="i137" class="rowColor"> |
| <td class="colFirst"><code>static scala.collection.Seq<String></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#stringToSeq-java.lang.String-">stringToSeq</a></span>(String str)</code> </td> |
| </tr> |
| <tr id="i138" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#stripDollars-java.lang.String-">stripDollars</a></span>(String s)</code> |
| <div class="block">Remove trailing dollar signs from qualified class name, |
| and return the trailing part after the last dollar sign in the middle</div> |
| </td> |
| </tr> |
| <tr id="i139" class="rowColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#substituteAppId-java.lang.String-java.lang.String-">substituteAppId</a></span>(String opt, |
| String appId)</code> |
| <div class="block">Replaces all the {{APP_ID}} occurrences with the App Id.</div> |
| </td> |
| </tr> |
| <tr id="i140" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#substituteAppNExecIds-java.lang.String-java.lang.String-java.lang.String-">substituteAppNExecIds</a></span>(String opt, |
| String appId, |
| String execId)</code> |
| <div class="block">Replaces all the {{EXECUTOR_ID}} occurrences with the Executor Id |
| and {{APP_ID}} occurrences with the App Id.</div> |
| </td> |
| </tr> |
| <tr id="i141" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#symlink-java.io.File-java.io.File-">symlink</a></span>(java.io.File src, |
| java.io.File dst)</code> |
| <div class="block">Creates a symlink.</div> |
| </td> |
| </tr> |
| <tr id="i142" class="altColor"> |
| <td class="colFirst"><code>static java.io.File</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tempFileWith-java.io.File-">tempFileWith</a></span>(java.io.File path)</code> |
| <div class="block">Returns a path of temporary file which is in the same directory with <code>path</code>.</div> |
| </td> |
| </tr> |
| <tr id="i143" class="rowColor"> |
| <td class="colFirst"><code>static scala.Option<Object></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#terminateProcess-java.lang.Process-long-">terminateProcess</a></span>(Process process, |
| long timeoutMs)</code> |
| <div class="block">Terminates a process waiting for at most the specified duration.</div> |
| </td> |
| </tr> |
| <tr id="i144" class="altColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#timeIt-int-scala.Function0-scala.Option-">timeIt</a></span>(int numIters, |
| scala.Function0<scala.runtime.BoxedUnit> f, |
| scala.Option<scala.Function0<scala.runtime.BoxedUnit>> prepare)</code> |
| <div class="block">Timing method based on iterations that permit JVM JIT optimization.</div> |
| </td> |
| </tr> |
| <tr id="i145" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#times-int-scala.Function0-">times</a></span>(int numIters, |
| scala.Function0<scala.runtime.BoxedUnit> f)</code> |
| <div class="block">Method executed for repeating a task for side effects.</div> |
| </td> |
| </tr> |
| <tr id="i146" class="altColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#timeStringAsMs-java.lang.String-">timeStringAsMs</a></span>(String str)</code> |
| <div class="block">Convert a time parameter such as (50s, 100ms, or 250us) to milliseconds for internal use.</div> |
| </td> |
| </tr> |
| <tr id="i147" class="rowColor"> |
| <td class="colFirst"><code>static long</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#timeStringAsSeconds-java.lang.String-">timeStringAsSeconds</a></span>(String str)</code> |
| <div class="block">Convert a time parameter such as (50s, 100ms, or 250us) to seconds for internal use.</div> |
| </td> |
| </tr> |
| <tr id="i148" class="altColor"> |
| <td class="colFirst"><code>static <T> scala.Tuple2<T,Object></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#timeTakenMs-scala.Function0-">timeTakenMs</a></span>(scala.Function0<T> body)</code> |
| <div class="block">Records the duration of running `body`.</div> |
| </td> |
| </tr> |
| <tr id="i149" class="rowColor"> |
| <td class="colFirst"><code>static <T> scala.util.Try<T></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryLog-scala.Function0-">tryLog</a></span>(scala.Function0<T> f)</code> |
| <div class="block">Executes the given block in a Try, logging any uncaught exceptions.</div> |
| </td> |
| </tr> |
| <tr id="i150" class="altColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryLogNonFatalError-scala.Function0-">tryLogNonFatalError</a></span>(scala.Function0<scala.runtime.BoxedUnit> block)</code> |
| <div class="block">Executes the given block.</div> |
| </td> |
| </tr> |
| <tr id="i151" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryOrExit-scala.Function0-">tryOrExit</a></span>(scala.Function0<scala.runtime.BoxedUnit> block)</code> |
| <div class="block">Execute a block of code that evaluates to Unit, forwarding any uncaught exceptions to the |
| default UncaughtExceptionHandler</div> |
| </td> |
| </tr> |
| <tr id="i152" class="altColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryOrIOException-scala.Function0-">tryOrIOException</a></span>(scala.Function0<T> block)</code> |
| <div class="block">Execute a block of code that returns a value, re-throwing any non-fatal uncaught |
| exceptions as IOException.</div> |
| </td> |
| </tr> |
| <tr id="i153" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryOrStopSparkContext-org.apache.spark.SparkContext-scala.Function0-">tryOrStopSparkContext</a></span>(<a href="../../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a> sc, |
| scala.Function0<scala.runtime.BoxedUnit> block)</code> |
| <div class="block">Execute a block of code that evaluates to Unit, stop SparkContext if there is any uncaught |
| exception</div> |
| </td> |
| </tr> |
| <tr id="i154" class="altColor"> |
| <td class="colFirst"><code>static <R extends java.io.Closeable,T><br>T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryWithResource-scala.Function0-scala.Function1-">tryWithResource</a></span>(scala.Function0<R> createResource, |
| scala.Function1<R,T> f)</code> </td> |
| </tr> |
| <tr id="i155" class="rowColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryWithSafeFinally-scala.Function0-scala.Function0-">tryWithSafeFinally</a></span>(scala.Function0<T> block, |
| scala.Function0<scala.runtime.BoxedUnit> finallyBlock)</code> |
| <div class="block">Execute a block of code, then a finally block, but if exceptions happen in |
| the finally block, do not suppress the original exception.</div> |
| </td> |
| </tr> |
| <tr id="i156" class="altColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#tryWithSafeFinallyAndFailureCallbacks-scala.Function0-scala.Function0-scala.Function0-">tryWithSafeFinallyAndFailureCallbacks</a></span>(scala.Function0<T> block, |
| scala.Function0<scala.runtime.BoxedUnit> catchBlock, |
| scala.Function0<scala.runtime.BoxedUnit> finallyBlock)</code> |
| <div class="block">Execute a block of code and call the failure callbacks in the catch block.</div> |
| </td> |
| </tr> |
| <tr id="i157" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#unpack-java.io.File-java.io.File-">unpack</a></span>(java.io.File source, |
| java.io.File dest)</code> |
| <div class="block">Unpacks an archive file into the specified directory.</div> |
| </td> |
| </tr> |
| <tr id="i158" class="altColor"> |
| <td class="colFirst"><code>static scala.collection.Seq<java.io.File></code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#unzipFilesFromFile-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.io.File-">unzipFilesFromFile</a></span>(org.apache.hadoop.fs.FileSystem fs, |
| org.apache.hadoop.fs.Path dfsZipFile, |
| java.io.File localDir)</code> |
| <div class="block">Decompress a zip file into a local dir.</div> |
| </td> |
| </tr> |
| <tr id="i159" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#updateSparkConfigFromProperties-org.apache.spark.SparkConf-scala.collection.Map-">updateSparkConfigFromProperties</a></span>(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| scala.collection.Map<String,String> properties)</code> |
| <div class="block">Updates Spark config with properties from a set of Properties.</div> |
| </td> |
| </tr> |
| <tr id="i160" class="altColor"> |
| <td class="colFirst"><code>static int</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#userPort-int-int-">userPort</a></span>(int base, |
| int offset)</code> |
| <div class="block">Returns the user port to try when trying to bind a service.</div> |
| </td> |
| </tr> |
| <tr id="i161" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#validateURL-java.net.URI-">validateURL</a></span>(java.net.URI uri)</code> |
| <div class="block">Validate that a given URI is actually a valid URL as well.</div> |
| </td> |
| </tr> |
| <tr id="i162" class="altColor"> |
| <td class="colFirst"><code>static String</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#weakIntern-java.lang.String-">weakIntern</a></span>(String s)</code> |
| <div class="block">String interning to reduce the memory usage.</div> |
| </td> |
| </tr> |
| <tr id="i163" class="rowColor"> |
| <td class="colFirst"><code>static scala.util.matching.Regex</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#windowsDrive--">windowsDrive</a></span>()</code> |
<div class="block">Pattern for matching a Windows drive, which contains only a single alphabetic character.</div> |
| </td> |
| </tr> |
| <tr id="i164" class="altColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#withContextClassLoader-java.lang.ClassLoader-scala.Function0-">withContextClassLoader</a></span>(ClassLoader ctxClassLoader, |
| scala.Function0<T> fn)</code> |
| <div class="block">Run a segment of code using a different context class loader in the current thread</div> |
| </td> |
| </tr> |
| <tr id="i165" class="rowColor"> |
| <td class="colFirst"><code>static <T> T</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#withDummyCallSite-org.apache.spark.SparkContext-scala.Function0-">withDummyCallSite</a></span>(<a href="../../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a> sc, |
| scala.Function0<T> body)</code> |
| <div class="block">To avoid calling <code>Utils.getCallSite</code> for every single RDD we create in the body, |
| set a dummy call site that RDDs use instead.</div> |
| </td> |
| </tr> |
| <tr id="i166" class="altColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#writeByteBuffer-java.nio.ByteBuffer-java.io.DataOutput-">writeByteBuffer</a></span>(java.nio.ByteBuffer bb, |
| java.io.DataOutput out)</code> |
| <div class="block">Primitive often used when writing <code>ByteBuffer</code> to <code>DataOutput</code></div> |
| </td> |
| </tr> |
| <tr id="i167" class="rowColor"> |
| <td class="colFirst"><code>static void</code></td> |
| <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/spark/util/Utils.html#writeByteBuffer-java.nio.ByteBuffer-java.io.OutputStream-">writeByteBuffer</a></span>(java.nio.ByteBuffer bb, |
| java.io.OutputStream out)</code> |
| <div class="block">Primitive often used when writing <code>ByteBuffer</code> to <code>OutputStream</code></div> |
| </td> |
| </tr> |
| </table> |
| <ul class="blockList"> |
| <li class="blockList"><a name="methods.inherited.from.class.Object"> |
| <!-- --> |
| </a> |
| <h3>Methods inherited from class Object</h3> |
| <code>equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li> |
| </ul> |
| </li> |
| </ul> |
| </li> |
| </ul> |
| </div> |
| <div class="details"> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <!-- ========= CONSTRUCTOR DETAIL ======== --> |
| <ul class="blockList"> |
| <li class="blockList"><a name="constructor.detail"> |
| <!-- --> |
| </a> |
| <h3>Constructor Detail</h3> |
| <a name="Utils--"> |
| <!-- --> |
| </a> |
| <ul class="blockListLast"> |
| <li class="blockList"> |
| <h4>Utils</h4> |
| <pre>public Utils()</pre> |
| </li> |
| </ul> |
| </li> |
| </ul> |
| <!-- ============ METHOD DETAIL ========== --> |
| <ul class="blockList"> |
| <li class="blockList"><a name="method.detail"> |
| <!-- --> |
| </a> |
| <h3>Method Detail</h3> |
| <a name="random--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>random</h4> |
| <pre>public static java.util.Random random()</pre> |
| </li> |
| </ul> |
| <a name="DEFAULT_DRIVER_MEM_MB--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>DEFAULT_DRIVER_MEM_MB</h4> |
| <pre>public static int DEFAULT_DRIVER_MEM_MB()</pre> |
| <div class="block">Define a default value for driver memory here since this value is referenced across the code |
| base and nearly all files already use Utils.scala</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="MAX_DIR_CREATION_ATTEMPTS--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>MAX_DIR_CREATION_ATTEMPTS</h4> |
| <pre>public static int MAX_DIR_CREATION_ATTEMPTS()</pre> |
| </li> |
| </ul> |
| <a name="LOCAL_SCHEME--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>LOCAL_SCHEME</h4> |
| <pre>public static String LOCAL_SCHEME()</pre> |
| <div class="block">Scheme used for files that are locally available on worker nodes in the cluster.</div> |
| </li> |
| </ul> |
| <a name="serialize-java.lang.Object-"> |
| <!-- --> |
| </a><a name="serialize-T-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>serialize</h4> |
| <pre>public static <T> byte[] serialize(T o)</pre> |
| <div class="block">Serialize an object using Java serialization</div> |
| </li> |
| </ul> |
| <a name="deserialize-byte:A-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>deserialize</h4> |
| <pre>public static <T> T deserialize(byte[] bytes)</pre> |
| <div class="block">Deserialize an object using Java serialization</div> |
| </li> |
| </ul> |
| <a name="deserialize-byte:A-java.lang.ClassLoader-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>deserialize</h4> |
| <pre>public static <T> T deserialize(byte[] bytes, |
| ClassLoader loader)</pre> |
| <div class="block">Deserialize an object using Java serialization and the given ClassLoader</div> |
| </li> |
| </ul> |
| <a name="deserializeLongValue-byte:A-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>deserializeLongValue</h4> |
| <pre>public static long deserializeLongValue(byte[] bytes)</pre> |
| <div class="block">Deserialize a Long value (used for <code>org.apache.spark.api.python.PythonPartitioner</code>)</div> |
| </li> |
| </ul> |
| <a name="serializeViaNestedStream-java.io.OutputStream-org.apache.spark.serializer.SerializerInstance-scala.Function1-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>serializeViaNestedStream</h4> |
| <pre>public static void serializeViaNestedStream(java.io.OutputStream os, |
| <a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a> ser, |
| scala.Function1<<a href="../../../../org/apache/spark/serializer/SerializationStream.html" title="class in org.apache.spark.serializer">SerializationStream</a>,scala.runtime.BoxedUnit> f)</pre> |
| <div class="block">Serialize via nested stream using specific serializer</div> |
| </li> |
| </ul> |
| <a name="deserializeViaNestedStream-java.io.InputStream-org.apache.spark.serializer.SerializerInstance-scala.Function1-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>deserializeViaNestedStream</h4> |
| <pre>public static void deserializeViaNestedStream(java.io.InputStream is, |
| <a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a> ser, |
| scala.Function1<<a href="../../../../org/apache/spark/serializer/DeserializationStream.html" title="class in org.apache.spark.serializer">DeserializationStream</a>,scala.runtime.BoxedUnit> f)</pre> |
| <div class="block">Deserialize via nested stream using specific serializer</div> |
| </li> |
| </ul> |
| <a name="weakIntern-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>weakIntern</h4> |
| <pre>public static String weakIntern(String s)</pre> |
| <div class="block">String interning to reduce the memory usage.</div> |
| </li> |
| </ul> |
| <a name="getSparkClassLoader--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getSparkClassLoader</h4> |
| <pre>public static ClassLoader getSparkClassLoader()</pre> |
| <div class="block">Get the ClassLoader which loaded Spark.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getContextOrSparkClassLoader--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getContextOrSparkClassLoader</h4> |
| <pre>public static ClassLoader getContextOrSparkClassLoader()</pre> |
| <div class="block">Get the Context ClassLoader on this thread or, if not present, the ClassLoader that |
| loaded Spark. |
| <p> |
This should be used whenever passing a ClassLoader to Class.forName or finding the currently |
| active loader when setting up ClassLoader delegation chains.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="classIsLoadable-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>classIsLoadable</h4> |
| <pre>public static boolean classIsLoadable(String clazz)</pre> |
| <div class="block">Determines whether the provided class is loadable in the current thread.</div> |
| </li> |
| </ul> |
| <a name="classForName-java.lang.String-boolean-boolean-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>classForName</h4> |
| <pre>public static <C> Class<C> classForName(String className, |
| boolean initialize, |
| boolean noSparkClassLoader)</pre> |
| <div class="block">Preferred alternative to Class.forName(className), as well as |
| Class.forName(className, initialize, loader) with current thread's ContextClassLoader.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>className</code> - (undocumented)</dd> |
| <dd><code>initialize</code> - (undocumented)</dd> |
| <dd><code>noSparkClassLoader</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="withContextClassLoader-java.lang.ClassLoader-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>withContextClassLoader</h4> |
| <pre>public static <T> T withContextClassLoader(ClassLoader ctxClassLoader, |
| scala.Function0<T> fn)</pre> |
| <div class="block">Run a segment of code using a different context class loader in the current thread</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>ctxClassLoader</code> - (undocumented)</dd> |
| <dd><code>fn</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="writeByteBuffer-java.nio.ByteBuffer-java.io.DataOutput-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>writeByteBuffer</h4> |
| <pre>public static void writeByteBuffer(java.nio.ByteBuffer bb, |
| java.io.DataOutput out)</pre> |
| <div class="block">Primitive often used when writing <code>ByteBuffer</code> to <code>DataOutput</code></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>bb</code> - (undocumented)</dd> |
| <dd><code>out</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="writeByteBuffer-java.nio.ByteBuffer-java.io.OutputStream-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>writeByteBuffer</h4> |
| <pre>public static void writeByteBuffer(java.nio.ByteBuffer bb, |
| java.io.OutputStream out)</pre> |
| <div class="block">Primitive often used when writing <code>ByteBuffer</code> to <code>OutputStream</code></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>bb</code> - (undocumented)</dd> |
| <dd><code>out</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="chmod700-java.io.File-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>chmod700</h4> |
| <pre>public static boolean chmod700(java.io.File file)</pre> |
| <div class="block">JDK equivalent of <code>chmod 700 file</code>. |
| <p></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>file</code> - the file whose permissions will be modified</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>true if the permissions were successfully changed, false otherwise.</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="createDirectory-java.io.File-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>createDirectory</h4> |
| <pre>public static boolean createDirectory(java.io.File dir)</pre> |
| <div class="block">Create a directory given the abstract pathname</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>dir</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>true, if the directory is successfully created; otherwise, return false.</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="createDirectory-java.lang.String-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>createDirectory</h4> |
| <pre>public static java.io.File createDirectory(String root, |
| String namePrefix)</pre> |
| <div class="block">Create a directory inside the given parent directory. The directory is guaranteed to be |
| newly created, and is not marked for automatic deletion.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>root</code> - (undocumented)</dd> |
| <dd><code>namePrefix</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="createTempDir--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>createTempDir</h4> |
| <pre>public static java.io.File createTempDir()</pre> |
| <div class="block">Create a temporary directory inside the <code>java.io.tmpdir</code> prefixed with <code>spark</code>. |
| The directory will be automatically deleted when the VM shuts down.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="createTempDir-java.lang.String-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>createTempDir</h4> |
| <pre>public static java.io.File createTempDir(String root, |
| String namePrefix)</pre> |
| <div class="block">Create a temporary directory inside the given parent directory. The directory will be |
| automatically deleted when the VM shuts down.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>root</code> - (undocumented)</dd> |
| <dd><code>namePrefix</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="copyStream-java.io.InputStream-java.io.OutputStream-boolean-boolean-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>copyStream</h4> |
| <pre>public static long copyStream(java.io.InputStream in, |
| java.io.OutputStream out, |
| boolean closeStreams, |
| boolean transferToEnabled)</pre> |
| <div class="block">Copy all data from an InputStream to an OutputStream. NIO way of file stream to file stream |
| copying is disabled by default unless explicitly set transferToEnabled as true, |
| the parameter transferToEnabled should be configured by spark.file.transferTo = [true|false].</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>in</code> - (undocumented)</dd> |
| <dd><code>out</code> - (undocumented)</dd> |
| <dd><code>closeStreams</code> - (undocumented)</dd> |
| <dd><code>transferToEnabled</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="copyStreamUpTo-java.io.InputStream-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>copyStreamUpTo</h4> |
| <pre>public static java.io.InputStream copyStreamUpTo(java.io.InputStream in, |
| long maxSize)</pre> |
| <div class="block">Copy the first <code>maxSize</code> bytes of data from the InputStream to an in-memory |
| buffer, primarily to check for corruption. |
| <p> |
| This returns a new InputStream which contains the same data as the original input stream. |
| It may be entirely an in-memory buffer, or it may be a combination of in-memory data, and then |
| continue to read from the original stream. The only real use of this is if the original input |
| stream will potentially detect corruption while the data is being read (e.g. from compression). |
| This allows for an eager check of corruption in the first maxSize bytes of data. |
| <p></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>in</code> - (undocumented)</dd> |
| <dd><code>maxSize</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>An InputStream which includes all data from the original stream (combining buffered |
| data and remaining data in the original stream)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="copyFileStreamNIO-java.nio.channels.FileChannel-java.nio.channels.WritableByteChannel-long-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>copyFileStreamNIO</h4> |
| <pre>public static void copyFileStreamNIO(java.nio.channels.FileChannel input, |
| java.nio.channels.WritableByteChannel output, |
| long startPosition, |
| long bytesToCopy)</pre> |
| </li> |
| </ul> |
| <a name="encodeFileNameToURIRawPath-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>encodeFileNameToURIRawPath</h4> |
| <pre>public static String encodeFileNameToURIRawPath(String fileName)</pre> |
| <div class="block">A file name may contain some invalid URI characters, such as " ". This method will convert the |
| file name to a raw path accepted by <code>java.net.URI(String)</code>. |
| <p> |
| Note: the file name must not contain "/" or "\"</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>fileName</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="decodeFileNameInURI-java.net.URI-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>decodeFileNameInURI</h4> |
| <pre>public static String decodeFileNameInURI(java.net.URI uri)</pre> |
| <div class="block">Get the file name from uri's raw path and decode it. If the raw path of uri ends with "/", |
| return the name before the last "/".</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>uri</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="fetchFile-java.lang.String-java.io.File-org.apache.spark.SparkConf-org.apache.hadoop.conf.Configuration-long-boolean-boolean-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>fetchFile</h4> |
| <pre>public static java.io.File fetchFile(String url, |
| java.io.File targetDir, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| org.apache.hadoop.conf.Configuration hadoopConf, |
| long timestamp, |
| boolean useCache, |
| boolean shouldUntar)</pre> |
| <div class="block">Download a file or directory to target directory. Supports fetching the file in a variety of |
| ways, including HTTP, Hadoop-compatible filesystems, and files on a standard filesystem, based |
| on the URL parameter. Fetching directories is only supported from Hadoop-compatible |
| filesystems. |
| <p> |
| If <code>useCache</code> is true, first attempts to fetch the file to a local cache that's shared |
| across executors running the same application. <code>useCache</code> is used mainly for |
| the executors, and not in local mode. |
| <p> |
| Throws SparkException if the target file already exists and has different contents than |
| the requested file. |
| <p> |
| If <code>shouldUntar</code> is true, it untars the given url if it is a tar.gz or tgz into <code>targetDir</code>. |
| This is a legacy behavior, and users should better use <code>spark.archives</code> configuration or |
| <code>SparkContext.addArchive</code></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>url</code> - (undocumented)</dd> |
| <dd><code>targetDir</code> - (undocumented)</dd> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dd><code>hadoopConf</code> - (undocumented)</dd> |
| <dd><code>timestamp</code> - (undocumented)</dd> |
| <dd><code>useCache</code> - (undocumented)</dd> |
| <dd><code>shouldUntar</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="unpack-java.io.File-java.io.File-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>unpack</h4> |
| <pre>public static void unpack(java.io.File source, |
| java.io.File dest)</pre> |
| <div class="block">Unpacks an archive file into the specified directory. It expects .jar, .zip, .tar.gz, .tgz |
| and .tar files. This behaves same as Hadoop's archive in distributed cache. This method is |
| basically copied from <code>org.apache.hadoop.yarn.util.FSDownload.unpack</code>.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>source</code> - (undocumented)</dd> |
| <dd><code>dest</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="timeTakenMs-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>timeTakenMs</h4> |
| <pre>public static <T> scala.Tuple2<T,Object> timeTakenMs(scala.Function0<T> body)</pre> |
| <div class="block">Records the duration of running `body`.</div> |
| </li> |
| </ul> |
| <a name="doFetchFile-java.lang.String-java.io.File-java.lang.String-org.apache.spark.SparkConf-org.apache.hadoop.conf.Configuration-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>doFetchFile</h4> |
| <pre>public static java.io.File doFetchFile(String url, |
| java.io.File targetDir, |
| String filename, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| org.apache.hadoop.conf.Configuration hadoopConf)</pre> |
| <div class="block">Download a file or directory to target directory. Supports fetching the file in a variety of |
| ways, including HTTP, Hadoop-compatible filesystems, and files on a standard filesystem, based |
| on the URL parameter. Fetching directories is only supported from Hadoop-compatible |
| filesystems. |
| <p> |
| Throws SparkException if the target file already exists and has different contents than |
| the requested file.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>url</code> - (undocumented)</dd> |
| <dd><code>targetDir</code> - (undocumented)</dd> |
| <dd><code>filename</code> - (undocumented)</dd> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dd><code>hadoopConf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="validateURL-java.net.URI-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>validateURL</h4> |
| <pre>public static void validateURL(java.net.URI uri) |
| throws java.net.MalformedURLException</pre> |
| <div class="block">Validate that a given URI is actually a valid URL as well.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>uri</code> - The URI to validate</dd> |
| <dt><span class="throwsLabel">Throws:</span></dt> |
| <dd><code>java.net.MalformedURLException</code></dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getLocalDir-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getLocalDir</h4> |
| <pre>public static String getLocalDir(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| <div class="block">Get the path of a temporary directory. Spark's local directories can be configured through |
| multiple settings, which are used with the following precedence: |
| <p> |
| - If called from inside of a YARN container, this will return a directory chosen by YARN. |
| - If the SPARK_LOCAL_DIRS environment variable is set, this will return a directory from it. |
| - Otherwise, if the spark.local.dir is set, this will return a directory from it. |
| - Otherwise, this will return java.io.tmpdir. |
| <p> |
| Some of these configuration options might be lists of multiple paths, but this method will |
| always return a single directory. The return directory is chosen randomly from the array |
| of directories it gets from getOrCreateLocalRootDirs.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="isInRunningSparkTask--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isInRunningSparkTask</h4> |
| <pre>public static boolean isInRunningSparkTask()</pre> |
| <div class="block">Returns if the current codes are running in a Spark task, e.g., in executors.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getConfiguredLocalDirs-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getConfiguredLocalDirs</h4> |
| <pre>public static String[] getConfiguredLocalDirs(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| <div class="block">Return the configured local directories where Spark can write files. This |
| method does not create any directories on its own, it only encapsulates the |
| logic of locating the local directories according to deployment mode.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="randomize-scala.collection.TraversableOnce-scala.reflect.ClassTag-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>randomize</h4> |
| <pre>public static <T> scala.collection.Seq<T> randomize(scala.collection.TraversableOnce<T> seq, |
| scala.reflect.ClassTag<T> evidence$1)</pre> |
| <div class="block">Shuffle the elements of a collection into a random order, returning the |
| result in a new collection. Unlike scala.util.Random.shuffle, this method |
| uses a local random number generator, avoiding inter-thread contention.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>seq</code> - (undocumented)</dd> |
| <dd><code>evidence$1</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="randomizeInPlace-java.lang.Object-java.util.Random-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>randomizeInPlace</h4> |
| <pre>public static <T> Object randomizeInPlace(Object arr, |
| java.util.Random rand)</pre> |
| <div class="block">Shuffle the elements of an array into a random order, modifying the |
| original array. Returns the original array.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>arr</code> - (undocumented)</dd> |
| <dd><code>rand</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="setCustomHostname-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>setCustomHostname</h4> |
| <pre>public static void setCustomHostname(String hostname)</pre> |
| <div class="block">Allow setting a custom host name because when we run on Mesos we need to use the same |
| hostname it reports to the master.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>hostname</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="localCanonicalHostName--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>localCanonicalHostName</h4> |
| <pre>public static String localCanonicalHostName()</pre> |
| <div class="block">Get the local machine's FQDN.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="localHostName--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>localHostName</h4> |
| <pre>public static String localHostName()</pre> |
| <div class="block">Get the local machine's hostname. |
| In case of IPv6, getHostAddress may return '0:0:0:0:0:0:0:1'.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="localHostNameForURI--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>localHostNameForURI</h4> |
| <pre>public static String localHostNameForURI()</pre> |
| <div class="block">Get the local machine's URI.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="checkHost-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>checkHost</h4> |
| <pre>public static void checkHost(String host)</pre> |
| <div class="block">Checks if the host contains only valid hostname/ip without port |
| NOTE: Incase of IPV6 ip it should be enclosed inside []</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>host</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="checkHostPort-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>checkHostPort</h4> |
| <pre>public static void checkHostPort(String hostPort)</pre> |
| </li> |
| </ul> |
| <a name="parseHostPort-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>parseHostPort</h4> |
| <pre>public static scala.Tuple2<String,Object> parseHostPort(String hostPort)</pre> |
| </li> |
| </ul> |
| <a name="getUsedTimeNs-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getUsedTimeNs</h4> |
| <pre>public static String getUsedTimeNs(long startTimeNs)</pre> |
| <div class="block">Return the string to tell how long has passed in milliseconds.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>startTimeNs</code> - - a timestamp in nanoseconds returned by <code>System.nanoTime</code>.</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="recursiveList-java.io.File-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>recursiveList</h4> |
| <pre>public static java.io.File[] recursiveList(java.io.File f)</pre> |
| <div class="block">Lists files recursively.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>f</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="deleteRecursively-java.io.File-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>deleteRecursively</h4> |
| <pre>public static void deleteRecursively(java.io.File file)</pre> |
| <div class="block">Delete a file or directory and its contents recursively. |
| Don't follow directories if they are symlinks. |
| Throws an exception if deletion is unsuccessful.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>file</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="doesDirectoryContainAnyNewFiles-java.io.File-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>doesDirectoryContainAnyNewFiles</h4> |
| <pre>public static boolean doesDirectoryContainAnyNewFiles(java.io.File dir, |
| long cutoff)</pre> |
| <div class="block">Determines if a directory contains any files newer than cutoff seconds. |
| <p></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>dir</code> - must be the path to a directory, or IllegalArgumentException is thrown</dd> |
| <dd><code>cutoff</code> - measured in seconds. Returns true if there are any files or directories in the |
| given directory whose last modified time is later than this many seconds ago</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="timeStringAsMs-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>timeStringAsMs</h4> |
| <pre>public static long timeStringAsMs(String str)</pre> |
| <div class="block">Convert a time parameter such as (50s, 100ms, or 250us) to milliseconds for internal use. If |
| no suffix is provided, the passed number is assumed to be in ms.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>str</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="timeStringAsSeconds-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>timeStringAsSeconds</h4> |
| <pre>public static long timeStringAsSeconds(String str)</pre> |
| <div class="block">Convert a time parameter such as (50s, 100ms, or 250us) to seconds for internal use. If |
| no suffix is provided, the passed number is assumed to be in seconds.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>str</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="byteStringAsBytes-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>byteStringAsBytes</h4> |
| <pre>public static long byteStringAsBytes(String str)</pre> |
| <div class="block">Convert a passed byte string (e.g. 50b, 100k, or 250m) to bytes for internal use. |
| <p> |
| If no suffix is provided, the passed number is assumed to be in bytes.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>str</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="byteStringAsKb-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>byteStringAsKb</h4> |
| <pre>public static long byteStringAsKb(String str)</pre> |
| <div class="block">Convert a passed byte string (e.g. 50b, 100k, or 250m) to kibibytes for internal use. |
| <p> |
| If no suffix is provided, the passed number is assumed to be in kibibytes.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>str</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="byteStringAsMb-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>byteStringAsMb</h4> |
| <pre>public static long byteStringAsMb(String str)</pre> |
| <div class="block">Convert a passed byte string (e.g. 50b, 100k, or 250m) to mebibytes for internal use. |
| <p> |
| If no suffix is provided, the passed number is assumed to be in mebibytes.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>str</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="byteStringAsGb-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>byteStringAsGb</h4> |
| <pre>public static long byteStringAsGb(String str)</pre> |
| <div class="block">Convert a passed byte string (e.g. 50b, 100k, or 250m, 500g) to gibibytes for internal use. |
| <p> |
| If no suffix is provided, the passed number is assumed to be in gibibytes.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>str</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="memoryStringToMb-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>memoryStringToMb</h4> |
| <pre>public static int memoryStringToMb(String str)</pre> |
| <div class="block">Convert a Java memory parameter passed to -Xmx (such as 300m or 1g) to a number of mebibytes.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>str</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="bytesToString-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>bytesToString</h4> |
| <pre>public static String bytesToString(long size)</pre> |
| <div class="block">Convert a quantity in bytes to a human-readable string such as "4.0 MiB".</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>size</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="bytesToString-scala.math.BigInt-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>bytesToString</h4> |
| <pre>public static String bytesToString(scala.math.BigInt size)</pre> |
| </li> |
| </ul> |
| <a name="msDurationToString-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>msDurationToString</h4> |
| <pre>public static String msDurationToString(long ms)</pre> |
| <div class="block">Returns a human-readable string representing a duration such as "35ms"</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>ms</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="megabytesToString-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>megabytesToString</h4> |
| <pre>public static String megabytesToString(long megabytes)</pre> |
| <div class="block">Convert a quantity in megabytes to a human-readable string such as "4.0 MiB".</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>megabytes</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="executeCommand-scala.collection.Seq-java.io.File-scala.collection.Map-boolean-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>executeCommand</h4> |
| <pre>public static Process executeCommand(scala.collection.Seq<String> command, |
| java.io.File workingDir, |
| scala.collection.Map<String,String> extraEnvironment, |
| boolean redirectStderr)</pre> |
| <div class="block">Execute a command and return the process running the command.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>command</code> - (undocumented)</dd> |
| <dd><code>workingDir</code> - (undocumented)</dd> |
| <dd><code>extraEnvironment</code> - (undocumented)</dd> |
| <dd><code>redirectStderr</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="executeAndGetOutput-scala.collection.Seq-java.io.File-scala.collection.Map-boolean-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>executeAndGetOutput</h4> |
| <pre>public static String executeAndGetOutput(scala.collection.Seq<String> command, |
| java.io.File workingDir, |
| scala.collection.Map<String,String> extraEnvironment, |
| boolean redirectStderr)</pre> |
| <div class="block">Execute a command and get its output, throwing an exception if it yields a code other than 0.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>command</code> - (undocumented)</dd> |
| <dd><code>workingDir</code> - (undocumented)</dd> |
| <dd><code>extraEnvironment</code> - (undocumented)</dd> |
| <dd><code>redirectStderr</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="processStreamByLine-java.lang.String-java.io.InputStream-scala.Function1-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>processStreamByLine</h4> |
| <pre>public static Thread processStreamByLine(String threadName, |
| java.io.InputStream inputStream, |
| scala.Function1<String,scala.runtime.BoxedUnit> processLine)</pre> |
| <div class="block">Return and start a daemon thread that processes the content of the input stream line by line.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>threadName</code> - (undocumented)</dd> |
| <dd><code>inputStream</code> - (undocumented)</dd> |
| <dd><code>processLine</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="tryOrExit-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>tryOrExit</h4> |
| <pre>public static void tryOrExit(scala.Function0<scala.runtime.BoxedUnit> block)</pre> |
| <div class="block">Execute a block of code that evaluates to Unit, forwarding any uncaught exceptions to the |
| default UncaughtExceptionHandler |
| <p> |
| NOTE: This method is to be called by the spark-started JVM process.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>block</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="tryOrStopSparkContext-org.apache.spark.SparkContext-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>tryOrStopSparkContext</h4> |
| <pre>public static void tryOrStopSparkContext(<a href="../../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a> sc, |
| scala.Function0<scala.runtime.BoxedUnit> block)</pre> |
| <div class="block">Execute a block of code that evaluates to Unit, stop SparkContext if there is any uncaught |
| exception |
| <p> |
| NOTE: This method is to be called by the driver-side components to avoid stopping the |
| user-started JVM process completely; in contrast, tryOrExit is to be called in the |
| spark-started JVM process.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>sc</code> - (undocumented)</dd> |
| <dd><code>block</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="tryOrIOException-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>tryOrIOException</h4> |
| <pre>public static <T> T tryOrIOException(scala.Function0<T> block)</pre> |
| <div class="block">Execute a block of code that returns a value, re-throwing any non-fatal uncaught |
| exceptions as IOException. This is used when implementing Externalizable and Serializable's |
| read and write methods, since Java's serializer will not report non-IOExceptions properly; |
| see SPARK-4080 for more context.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>block</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="tryLogNonFatalError-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>tryLogNonFatalError</h4> |
| <pre>public static void tryLogNonFatalError(scala.Function0<scala.runtime.BoxedUnit> block)</pre> |
| <div class="block">Executes the given block. Log non-fatal errors if any, and only throw fatal errors</div> |
| </li> |
| </ul> |
| <a name="tryWithSafeFinally-scala.Function0-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>tryWithSafeFinally</h4> |
| <pre>public static <T> T tryWithSafeFinally(scala.Function0<T> block, |
| scala.Function0<scala.runtime.BoxedUnit> finallyBlock)</pre> |
| <div class="block">Execute a block of code, then a finally block, but if exceptions happen in |
| the finally block, do not suppress the original exception. |
| <p> |
| This is primarily an issue with <code>finally { out.close() }</code> blocks, where |
| close needs to be called to clean up <code>out</code>, but if an exception happened |
| in <code>out.write</code>, it's likely <code>out</code> may be corrupted and <code>out.close</code> will |
| fail as well. This would then suppress the original/likely more meaningful |
| exception from the original <code>out.write</code> call.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>block</code> - (undocumented)</dd> |
| <dd><code>finallyBlock</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="tryWithSafeFinallyAndFailureCallbacks-scala.Function0-scala.Function0-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>tryWithSafeFinallyAndFailureCallbacks</h4> |
| <pre>public static <T> T tryWithSafeFinallyAndFailureCallbacks(scala.Function0<T> block, |
| scala.Function0<scala.runtime.BoxedUnit> catchBlock, |
| scala.Function0<scala.runtime.BoxedUnit> finallyBlock)</pre> |
| <div class="block">Execute a block of code and call the failure callbacks in the catch block. If exceptions occur |
| in either the catch or the finally block, they are appended to the list of suppressed |
| exceptions in original exception which is then rethrown. |
| <p> |
| This is primarily an issue with <code>catch { abort() }</code> or <code>finally { out.close() }</code> blocks, |
| where the abort/close needs to be called to clean up <code>out</code>, but if an exception happened |
| in <code>out.write</code>, it's likely <code>out</code> may be corrupted and <code>abort</code> or <code>out.close</code> will |
| fail as well. This would then suppress the original/likely more meaningful |
| exception from the original <code>out.write</code> call.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>block</code> - (undocumented)</dd> |
| <dd><code>catchBlock</code> - (undocumented)</dd> |
| <dd><code>finallyBlock</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getCallSite-scala.Function1-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getCallSite</h4> |
| <pre>public static org.apache.spark.util.CallSite getCallSite(scala.Function1<String,Object> skipClass)</pre> |
| <div class="block">When called inside a class in the spark package, returns the name of the user code class |
| (outside the spark package) that called into Spark, as well as which Spark method they called. |
| This is used, for example, to tell users where in their code each RDD got created. |
| <p></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>skipClass</code> - Function that is used to exclude non-user-code classes.</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getFileLength-java.io.File-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getFileLength</h4> |
| <pre>public static long getFileLength(java.io.File file, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> workConf)</pre> |
| <div class="block">Return the file length, if the file is compressed it returns the uncompressed file length. |
| It also caches the uncompressed file size to avoid repeated decompression. The cache size is |
| read from workConf.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>file</code> - (undocumented)</dd> |
| <dd><code>workConf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="offsetBytes-java.lang.String-long-long-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>offsetBytes</h4> |
| <pre>public static String offsetBytes(String path, |
| long length, |
| long start, |
| long end)</pre> |
| <div class="block">Return a string containing part of a file from byte 'start' to 'end'.</div> |
| </li> |
| </ul> |
| <a name="offsetBytes-scala.collection.Seq-scala.collection.Seq-long-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>offsetBytes</h4> |
| <pre>public static String offsetBytes(scala.collection.Seq<java.io.File> files, |
| scala.collection.Seq<Object> fileLengths, |
| long start, |
| long end)</pre> |
| <div class="block">Return a string containing data across a set of files. The <code>start</code> |
| and <code>end</code> offsets are based on the cumulative size of all the files taken in |
| the given order.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>files</code> - (undocumented)</dd> |
| <dd><code>fileLengths</code> - (undocumented)</dd> |
| <dd><code>start</code> - (undocumented)</dd> |
| <dd><code>end</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="clone-java.lang.Object-org.apache.spark.serializer.SerializerInstance-scala.reflect.ClassTag-"> |
| <!-- --> |
| </a><a name="clone-T-org.apache.spark.serializer.SerializerInstance-scala.reflect.ClassTag-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>clone</h4> |
| <pre>public static <T> T clone(T value, |
| <a href="../../../../org/apache/spark/serializer/SerializerInstance.html" title="class in org.apache.spark.serializer">SerializerInstance</a> serializer, |
| scala.reflect.ClassTag<T> evidence$2)</pre> |
| <div class="block">Clone an object using a Spark serializer.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>value</code> - (undocumented)</dd> |
| <dd><code>serializer</code> - (undocumented)</dd> |
| <dd><code>evidence$2</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="splitCommandString-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>splitCommandString</h4> |
| <pre>public static scala.collection.Seq<String> splitCommandString(String s)</pre> |
| <div class="block">Split a string of potentially quoted arguments from the command line the way that a shell |
| would do it to determine arguments to a command. For example, if the string is 'a "b c" d', |
| then it would be parsed as three arguments: 'a', 'b c' and 'd'.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>s</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="nonNegativeMod-int-int-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>nonNegativeMod</h4> |
| <pre>public static int nonNegativeMod(int x, |
| int mod)</pre> |
| </li> |
| </ul> |
| <a name="nonNegativeHash-java.lang.Object-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>nonNegativeHash</h4> |
| <pre>public static int nonNegativeHash(Object obj)</pre> |
| </li> |
| </ul> |
| <a name="getSystemProperties--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getSystemProperties</h4> |
| <pre>public static scala.collection.Map<String,String> getSystemProperties()</pre> |
| <div class="block">Returns the system properties map that is thread-safe to iterate over. It gets the |
| properties which have been set explicitly, as well as those for which only a default value |
| has been defined.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="times-int-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>times</h4> |
| <pre>public static void times(int numIters, |
| scala.Function0<scala.runtime.BoxedUnit> f)</pre> |
| <div class="block">Method executed for repeating a task for side effects. |
| Unlike a for comprehension, it permits JVM JIT optimization</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>numIters</code> - (undocumented)</dd> |
| <dd><code>f</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="timeIt-int-scala.Function0-scala.Option-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>timeIt</h4> |
| <pre>public static long timeIt(int numIters, |
| scala.Function0<scala.runtime.BoxedUnit> f, |
| scala.Option<scala.Function0<scala.runtime.BoxedUnit>> prepare)</pre> |
| <div class="block">Timing method based on iterations that permit JVM JIT optimization. |
| <p></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>numIters</code> - number of iterations</dd> |
| <dd><code>f</code> - function to be executed. If prepare is not None, the running time of each call to f |
| must be an order of magnitude longer than one nanosecond for accurate timing.</dd> |
| <dd><code>prepare</code> - function to be executed before each call to f. Its running time doesn't count.</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>the total time across all iterations (not counting preparation time) in nanoseconds.</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getIteratorSize-scala.collection.Iterator-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getIteratorSize</h4> |
| <pre>public static long getIteratorSize(scala.collection.Iterator<?> iterator)</pre> |
| <div class="block">Counts the number of elements of an iterator.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>iterator</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getIteratorZipWithIndex-scala.collection.Iterator-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getIteratorZipWithIndex</h4> |
| <pre>public static <T> scala.collection.Iterator<scala.Tuple2<T,Object>> getIteratorZipWithIndex(scala.collection.Iterator<T> iter, |
| long startIndex)</pre> |
| <div class="block">Generate a zipWithIndex iterator, avoid index value overflowing problem |
| in scala's zipWithIndex</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>iter</code> - (undocumented)</dd> |
| <dd><code>startIndex</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="symlink-java.io.File-java.io.File-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>symlink</h4> |
| <pre>public static void symlink(java.io.File src, |
| java.io.File dst)</pre> |
| <div class="block">Creates a symlink. |
| <p></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>src</code> - absolute path to the source</dd> |
| <dd><code>dst</code> - relative path for the destination</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getFormattedClassName-java.lang.Object-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getFormattedClassName</h4> |
| <pre>public static String getFormattedClassName(Object obj)</pre> |
| <div class="block">Return the class name of the given object, removing all dollar signs</div> |
| </li> |
| </ul> |
| <a name="getHadoopFileSystem-java.net.URI-org.apache.hadoop.conf.Configuration-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getHadoopFileSystem</h4> |
| <pre>public static org.apache.hadoop.fs.FileSystem getHadoopFileSystem(java.net.URI path, |
| org.apache.hadoop.conf.Configuration conf)</pre> |
| <div class="block">Return a Hadoop FileSystem with the scheme encoded in the given path.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>path</code> - (undocumented)</dd> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getHadoopFileSystem-java.lang.String-org.apache.hadoop.conf.Configuration-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getHadoopFileSystem</h4> |
| <pre>public static org.apache.hadoop.fs.FileSystem getHadoopFileSystem(String path, |
| org.apache.hadoop.conf.Configuration conf)</pre> |
| <div class="block">Return a Hadoop FileSystem with the scheme encoded in the given path.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>path</code> - (undocumented)</dd> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="isWindows--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isWindows</h4> |
| <pre>public static boolean isWindows()</pre> |
| <div class="block">Whether the underlying operating system is Windows.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="isMac--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isMac</h4> |
| <pre>public static boolean isMac()</pre> |
| <div class="block">Whether the underlying operating system is Mac OS X.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="isMacOnAppleSilicon--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isMacOnAppleSilicon</h4> |
| <pre>public static boolean isMacOnAppleSilicon()</pre> |
| <div class="block">Whether the underlying operating system is Mac OS X and processor is Apple Silicon.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="preferIPv6--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>preferIPv6</h4> |
| <pre>public static boolean preferIPv6()</pre> |
| <div class="block">Whether the underlying JVM prefers IPv6 addresses.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="windowsDrive--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>windowsDrive</h4> |
| <pre>public static scala.util.matching.Regex windowsDrive()</pre> |
| <div class="block">Pattern for matching a Windows drive, which consists of a single alphabetic character.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="isTesting--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isTesting</h4> |
| <pre>public static boolean isTesting()</pre> |
| <div class="block">Indicates whether Spark is currently running unit tests.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="terminateProcess-java.lang.Process-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>terminateProcess</h4> |
| <pre>public static scala.Option<Object> terminateProcess(Process process, |
| long timeoutMs)</pre> |
| <div class="block">Terminates a process waiting for at most the specified duration. |
| <p></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>process</code> - (undocumented)</dd> |
| <dd><code>timeoutMs</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>the process exit value if it was successfully terminated, else None</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getStderr-java.lang.Process-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getStderr</h4> |
| <pre>public static scala.Option<String> getStderr(Process process, |
| long timeoutMs)</pre> |
| <div class="block">Return the stderr of a process after waiting for the process to terminate. |
| If the process does not terminate within the specified timeout, return None.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>process</code> - (undocumented)</dd> |
| <dd><code>timeoutMs</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="logUncaughtExceptions-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>logUncaughtExceptions</h4> |
| <pre>public static <T> T logUncaughtExceptions(scala.Function0<T> f)</pre> |
| <div class="block">Execute the given block, logging and re-throwing any uncaught exception. |
| This is particularly useful for wrapping code that runs in a thread, to ensure |
| that exceptions are printed, and to avoid having to catch Throwable.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>f</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="tryLog-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>tryLog</h4> |
| <pre>public static <T> scala.util.Try<T> tryLog(scala.Function0<T> f)</pre> |
| <div class="block">Executes the given block in a Try, logging any uncaught exceptions.</div> |
| </li> |
| </ul> |
| <a name="isFatalError-java.lang.Throwable-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isFatalError</h4> |
| <pre>public static boolean isFatalError(Throwable e)</pre> |
| <div class="block">Returns true if the given exception was fatal. See docs for scala.util.control.NonFatal.</div> |
| </li> |
| </ul> |
| <a name="resolveURI-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>resolveURI</h4> |
| <pre>public static java.net.URI resolveURI(String path)</pre> |
| <div class="block">Return a well-formed URI for the file described by a user input string. |
| <p> |
| If the supplied path does not contain a scheme, or is a relative path, it will be |
| converted into an absolute path with a file:// scheme.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>path</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="resolveURIs-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>resolveURIs</h4> |
| <pre>public static String resolveURIs(String paths)</pre> |
| <div class="block">Resolve a comma-separated list of paths.</div> |
| </li> |
| </ul> |
| <a name="isAbsoluteURI-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isAbsoluteURI</h4> |
| <pre>public static boolean isAbsoluteURI(String path)</pre> |
| <div class="block">Check whether a path is an absolute URI.</div> |
| </li> |
| </ul> |
| <a name="nonLocalPaths-java.lang.String-boolean-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>nonLocalPaths</h4> |
| <pre>public static String[] nonLocalPaths(String paths, |
| boolean testWindows)</pre> |
| <div class="block">Return all non-local paths from a comma-separated list of paths.</div> |
| </li> |
| </ul> |
| <a name="loadDefaultSparkProperties-org.apache.spark.SparkConf-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>loadDefaultSparkProperties</h4> |
| <pre>public static String loadDefaultSparkProperties(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| String filePath)</pre> |
| <div class="block">Load default Spark properties from the given file. If no file is provided, |
| use the common defaults file. This mutates state in the given SparkConf and |
| in this JVM's system properties if the config specified in the file is not |
| already set. Return the path of the properties file used.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dd><code>filePath</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="updateSparkConfigFromProperties-org.apache.spark.SparkConf-scala.collection.Map-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>updateSparkConfigFromProperties</h4> |
| <pre>public static void updateSparkConfigFromProperties(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| scala.collection.Map<String,String> properties)</pre> |
| <div class="block">Updates Spark config with properties from a set of Properties. |
| Provided properties have the highest priority.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dd><code>properties</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getPropertiesFromFile-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getPropertiesFromFile</h4> |
| <pre>public static scala.collection.Map<String,String> getPropertiesFromFile(String filename)</pre> |
| <div class="block">Load properties present in the given file.</div> |
| </li> |
| </ul> |
| <a name="getDefaultPropertiesFile-scala.collection.Map-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getDefaultPropertiesFile</h4> |
| <pre>public static String getDefaultPropertiesFile(scala.collection.Map<String,String> env)</pre> |
| <div class="block">Return the path of the default Spark properties file.</div> |
| </li> |
| </ul> |
| <a name="exceptionString-java.lang.Throwable-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>exceptionString</h4> |
| <pre>public static String exceptionString(Throwable e)</pre> |
| <div class="block">Return a nice string representation of the exception. It will call "printStackTrace" to |
| recursively generate the stack trace including the exception and its causes.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>e</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getThreadDump--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getThreadDump</h4> |
| <pre>public static <a href="../../../../org/apache/spark/status/api/v1/ThreadStackTrace.html" title="class in org.apache.spark.status.api.v1">ThreadStackTrace</a>[] getThreadDump()</pre> |
| <div class="block">Return a thread dump of all threads' stacktraces. Used to capture dumps for the web UI</div> |
| </li> |
| </ul> |
| <a name="getThreadDumpForThread-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getThreadDumpForThread</h4> |
| <pre>public static scala.Option<<a href="../../../../org/apache/spark/status/api/v1/ThreadStackTrace.html" title="class in org.apache.spark.status.api.v1">ThreadStackTrace</a>> getThreadDumpForThread(long threadId)</pre> |
| </li> |
| </ul> |
| <a name="sparkJavaOpts-org.apache.spark.SparkConf-scala.Function1-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>sparkJavaOpts</h4> |
| <pre>public static scala.collection.Seq<String> sparkJavaOpts(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| scala.Function1<String,Object> filterKey)</pre> |
| <div class="block">Convert all spark properties set in the given SparkConf to a sequence of java options.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dd><code>filterKey</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="portMaxRetries-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>portMaxRetries</h4> |
| <pre>public static int portMaxRetries(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| <div class="block">Maximum number of retries when binding to a port before giving up.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="userPort-int-int-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>userPort</h4> |
| <pre>public static int userPort(int base, |
| int offset)</pre> |
| <div class="block">Returns the user port to try when trying to bind a service. Handles wrapping and skipping |
| privileged ports.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>base</code> - (undocumented)</dd> |
| <dd><code>offset</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="startServiceOnPort-int-scala.Function1-org.apache.spark.SparkConf-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>startServiceOnPort</h4> |
| <pre>public static <T> scala.Tuple2<T,Object> startServiceOnPort(int startPort, |
| scala.Function1<Object,scala.Tuple2<T,Object>> startService, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| String serviceName)</pre> |
| <div class="block">Attempt to start a service on the given port, or fail after a number of attempts. |
| Each subsequent attempt uses 1 + the port used in the previous attempt (unless the port is 0). |
| <p></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>startPort</code> - The initial port to start the service on.</dd> |
| <dd><code>startService</code> - Function to start service on a given port. |
| This is expected to throw java.net.BindException on port collision.</dd> |
| <dd><code>conf</code> - A SparkConf used to get the maximum number of retries when binding to a port.</dd> |
| <dd><code>serviceName</code> - Name of the service.</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(service: T, port: Int)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="isBindCollision-java.lang.Throwable-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isBindCollision</h4> |
| <pre>public static boolean isBindCollision(Throwable exception)</pre> |
| <div class="block">Return whether the exception is caused by an address-port collision when binding.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>exception</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="setLogLevel-org.apache.logging.log4j.Level-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>setLogLevel</h4> |
| <pre>public static void setLogLevel(org.apache.logging.log4j.Level l)</pre> |
| <div class="block">Configure a new log4j level.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>l</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="libraryPathEnvName--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>libraryPathEnvName</h4> |
| <pre>public static String libraryPathEnvName()</pre> |
| <div class="block">Return the current system LD_LIBRARY_PATH name</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="libraryPathEnvPrefix-scala.collection.Seq-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>libraryPathEnvPrefix</h4> |
| <pre>public static String libraryPathEnvPrefix(scala.collection.Seq<String> libraryPaths)</pre> |
| <div class="block">Return the prefix of a command that appends the given library paths to the |
| system-specific library path environment variable. On Unix, for instance, |
| this returns the string LD_LIBRARY_PATH="path1:path2:$LD_LIBRARY_PATH".</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>libraryPaths</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getSparkOrYarnConfig-org.apache.spark.SparkConf-java.lang.String-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getSparkOrYarnConfig</h4> |
| <pre>public static String getSparkOrYarnConfig(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| String key, |
| String default_)</pre> |
| <div class="block">Return the value of a config either through the SparkConf or the Hadoop configuration. |
| We check whether the key is set in the SparkConf before looking at any Hadoop configuration. |
| If the key is set in SparkConf, no matter whether it is running on YARN or not, |
| the value is obtained from SparkConf. |
| Only when the key is not set in SparkConf and Spark is running on YARN |
| is the value obtained from the Hadoop configuration.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dd><code>key</code> - (undocumented)</dd> |
| <dd><code>default_</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="extractHostPortFromSparkUrl-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>extractHostPortFromSparkUrl</h4> |
| <pre>public static scala.Tuple2<String,Object> extractHostPortFromSparkUrl(String sparkUrl) |
| throws <a href="../../../../org/apache/spark/SparkException.html" title="class in org.apache.spark">SparkException</a></pre> |
| <div class="block">Return a pair of host and port extracted from the <code>sparkUrl</code>. |
| <p> |
| A spark url (<code>spark://host:port</code>) is a special URI that its scheme is <code>spark</code> and only contains |
| host and port. |
| <p></div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>sparkUrl</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| <dt><span class="throwsLabel">Throws:</span></dt> |
| <dd><code><a href="../../../../org/apache/spark/SparkException.html" title="class in org.apache.spark">SparkException</a></code> - if sparkUrl is invalid.</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getCurrentUserName--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getCurrentUserName</h4> |
| <pre>public static String getCurrentUserName()</pre> |
| <div class="block">Returns the current user name. This is the currently logged in user, unless that's been |
| overridden by the <code>SPARK_USER</code> environment variable.</div> |
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="EMPTY_USER_GROUPS--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>EMPTY_USER_GROUPS</h4> |
| <pre>public static scala.collection.immutable.Set<String> EMPTY_USER_GROUPS()</pre> |
| </li> |
| </ul> |
| <a name="getCurrentUserGroups-org.apache.spark.SparkConf-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getCurrentUserGroups</h4> |
| <pre>public static scala.collection.immutable.Set<String> getCurrentUserGroups(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> sparkConf, |
| String username)</pre> |
| </li> |
| </ul> |
| <a name="parseStandaloneMasterUrls-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>parseStandaloneMasterUrls</h4> |
| <pre>public static String[] parseStandaloneMasterUrls(String masterUrls)</pre> |
| <div class="block">Split the comma delimited string of master URLs into a list. |
| For instance, "spark://abc,def" becomes [spark://abc, spark://def].</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>masterUrls</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="BACKUP_STANDALONE_MASTER_PREFIX--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>BACKUP_STANDALONE_MASTER_PREFIX</h4> |
| <pre>public static String BACKUP_STANDALONE_MASTER_PREFIX()</pre> |
| <div class="block">An identifier that backup masters use in their responses.</div> |
| </li> |
| </ul> |
| <a name="responseFromBackup-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>responseFromBackup</h4> |
| <pre>public static boolean responseFromBackup(String msg)</pre> |
| <div class="block">Return true if the response message is sent from a backup Master on standby.</div> |
| </li> |
| </ul> |
| <a name="withDummyCallSite-org.apache.spark.SparkContext-scala.Function0-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>withDummyCallSite</h4> |
| <pre>public static <T> T withDummyCallSite(<a href="../../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a> sc, |
| scala.Function0<T> body)</pre> |
| <div class="block">To avoid calling <code>Utils.getCallSite</code> for every single RDD we create in the body, |
| set a dummy call site that RDDs use instead. This is for performance optimization.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>sc</code> - (undocumented)</dd> |
| <dd><code>body</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="isInDirectory-java.io.File-java.io.File-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isInDirectory</h4> |
| <pre>public static boolean isInDirectory(java.io.File parent, |
| java.io.File child)</pre> |
| <div class="block">Return whether the specified file is a parent directory of the child file.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>parent</code> - (undocumented)</dd> |
| <dd><code>child</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="isLocalMaster-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isLocalMaster</h4> |
| <pre>public static boolean isLocalMaster(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>whether it is local mode</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="isPushBasedShuffleEnabled-org.apache.spark.SparkConf-boolean-boolean-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isPushBasedShuffleEnabled</h4> |
| <pre>public static boolean isPushBasedShuffleEnabled(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| boolean isDriver, |
| boolean checkSerializer)</pre> |
<div class="block">Push based shuffle can only be enabled when the following conditions are met:
| - the application is submitted to run in YARN mode |
| - external shuffle service enabled |
| - IO encryption disabled |
| - serializer(such as KryoSerializer) supports relocation of serialized objects</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dd><code>isDriver</code> - (undocumented)</dd> |
| <dd><code>checkSerializer</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="instantiateSerializerOrShuffleManager-java.lang.String-org.apache.spark.SparkConf-boolean-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>instantiateSerializerOrShuffleManager</h4> |
| <pre>public static <T> T instantiateSerializerOrShuffleManager(String className, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| boolean isDriver)</pre> |
| </li> |
| </ul> |
| <a name="instantiateSerializerFromConf-org.apache.spark.internal.config.ConfigEntry-org.apache.spark.SparkConf-boolean-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>instantiateSerializerFromConf</h4> |
| <pre>public static <T> T instantiateSerializerFromConf(org.apache.spark.internal.config.ConfigEntry<String> propertyName, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| boolean isDriver)</pre> |
| </li> |
| </ul> |
| <a name="isDynamicAllocationEnabled-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isDynamicAllocationEnabled</h4> |
| <pre>public static boolean isDynamicAllocationEnabled(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| <div class="block">Return whether dynamic allocation is enabled in the given conf.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="isStreamingDynamicAllocationEnabled-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isStreamingDynamicAllocationEnabled</h4> |
| <pre>public static boolean isStreamingDynamicAllocationEnabled(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| </li> |
| </ul> |
| <a name="getDynamicAllocationInitialExecutors-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getDynamicAllocationInitialExecutors</h4> |
| <pre>public static int getDynamicAllocationInitialExecutors(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| <div class="block">Return the initial number of executors for dynamic allocation.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="tryWithResource-scala.Function0-scala.Function1-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>tryWithResource</h4> |
| <pre>public static <R extends java.io.Closeable,T> T tryWithResource(scala.Function0<R> createResource, |
| scala.Function1<R,T> f)</pre> |
| </li> |
| </ul> |
| <a name="tempFileWith-java.io.File-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>tempFileWith</h4> |
| <pre>public static java.io.File tempFileWith(java.io.File path)</pre> |
| <div class="block">Returns a path of temporary file which is in the same directory with <code>path</code>.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>path</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getProcessName--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getProcessName</h4> |
| <pre>public static String getProcessName()</pre> |
<div class="block">Returns the name of this JVM process. This is OS dependent, but on typical platforms
(OSX, Linux, Windows) it is formatted as PID@hostname.</div>
| <dl> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="initDaemon-org.slf4j.Logger-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>initDaemon</h4> |
| <pre>public static void initDaemon(org.slf4j.Logger log)</pre> |
| <div class="block">Utility function that should be called early in <code>main()</code> for daemons to set up some common |
| diagnostic state.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>log</code> - (undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getUserJars-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getUserJars</h4> |
| <pre>public static scala.collection.Seq<String> getUserJars(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| <div class="block">Return the jar files pointed by the "spark.jars" property. Spark internally will distribute |
| these jars through file server. In the YARN mode, it will return an empty list, since YARN |
| has its own mechanism to distribute jars.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getLocalUserJarsForShell-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getLocalUserJarsForShell</h4> |
| <pre>public static scala.collection.Seq<String> getLocalUserJarsForShell(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| <div class="block">Return the local jar files which will be added to REPL's classpath. These jar files are |
| specified by --jars (spark.jars) or --packages, remote jars will be downloaded to local by |
| SparkSubmit at first.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="redact-org.apache.spark.SparkConf-scala.collection.Seq-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>redact</h4> |
| <pre>public static scala.collection.Seq<scala.Tuple2<String,String>> redact(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| scala.collection.Seq<scala.Tuple2<String,String>> kvs)</pre> |
| <div class="block">Redact the sensitive values in the given map. If a map key matches the redaction pattern then |
| its value is replaced with a dummy text.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dd><code>kvs</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="redact-scala.Option-scala.collection.Seq-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>redact</h4> |
| <pre>public static <K,V> scala.collection.Seq<scala.Tuple2<K,V>> redact(scala.Option<scala.util.matching.Regex> regex, |
| scala.collection.Seq<scala.Tuple2<K,V>> kvs)</pre> |
| <div class="block">Redact the sensitive values in the given map. If a map key matches the redaction pattern then |
| its value is replaced with a dummy text.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>regex</code> - (undocumented)</dd> |
| <dd><code>kvs</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="redact-scala.Option-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>redact</h4> |
| <pre>public static String redact(scala.Option<scala.util.matching.Regex> regex, |
| String text)</pre> |
| <div class="block">Redact the sensitive information in the given string.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>regex</code> - (undocumented)</dd> |
| <dd><code>text</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="redact-scala.collection.Map-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>redact</h4> |
| <pre>public static scala.collection.Seq<scala.Tuple2<String,String>> redact(scala.collection.Map<String,String> kvs)</pre> |
| <div class="block">Looks up the redaction regex from within the key value pairs and uses it to redact the rest |
| of the key value pairs. No care is taken to make sure the redaction property itself is not |
| redacted. So theoretically, the property itself could be configured to redact its own value |
| when printing.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>kvs</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="redactCommandLineArgs-org.apache.spark.SparkConf-scala.collection.Seq-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>redactCommandLineArgs</h4> |
| <pre>public static scala.collection.Seq<String> redactCommandLineArgs(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf, |
| scala.collection.Seq<String> commands)</pre> |
| </li> |
| </ul> |
| <a name="stringToSeq-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>stringToSeq</h4> |
| <pre>public static scala.collection.Seq<String> stringToSeq(String str)</pre> |
| </li> |
| </ul> |
| <a name="loadExtensions-java.lang.Class-scala.collection.Seq-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>loadExtensions</h4> |
| <pre>public static <T> scala.collection.Seq<T> loadExtensions(Class<T> extClass, |
| scala.collection.Seq<String> classes, |
| <a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| <div class="block">Create instances of extension classes. |
| <p> |
| The classes in the given list must: |
| - Be sub-classes of the given base class. |
| - Provide either a no-arg constructor, or a 1-arg constructor that takes a SparkConf. |
| <p> |
| The constructors are allowed to throw "UnsupportedOperationException" if the extension does not |
| want to be registered; this allows the implementations to check the Spark configuration (or |
| other state) and decide they do not need to be added. A log message is printed in that case. |
| Other exceptions are bubbled up.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>extClass</code> - (undocumented)</dd> |
| <dd><code>classes</code> - (undocumented)</dd> |
| <dd><code>conf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="checkAndGetK8sMasterUrl-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>checkAndGetK8sMasterUrl</h4> |
| <pre>public static String checkAndGetK8sMasterUrl(String rawMasterURL)</pre> |
| <div class="block">Check the validity of the given Kubernetes master URL and return the resolved URL. Prefix |
| "k8s://" is appended to the resolved URL as the prefix is used by KubernetesClusterManager |
| in canCreate to determine if the KubernetesClusterManager should be used.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>rawMasterURL</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="substituteAppNExecIds-java.lang.String-java.lang.String-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>substituteAppNExecIds</h4> |
| <pre>public static String substituteAppNExecIds(String opt, |
| String appId, |
| String execId)</pre> |
| <div class="block">Replaces all the {{EXECUTOR_ID}} occurrences with the Executor Id |
| and {{APP_ID}} occurrences with the App Id.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>opt</code> - (undocumented)</dd> |
| <dd><code>appId</code> - (undocumented)</dd> |
| <dd><code>execId</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="substituteAppId-java.lang.String-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>substituteAppId</h4> |
| <pre>public static String substituteAppId(String opt, |
| String appId)</pre> |
| <div class="block">Replaces all the {{APP_ID}} occurrences with the App Id.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>opt</code> - (undocumented)</dd> |
| <dd><code>appId</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="createSecret-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>createSecret</h4> |
| <pre>public static String createSecret(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| </li> |
| </ul> |
| <a name="isMemberClass-java.lang.Class-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isMemberClass</h4> |
| <pre>public static boolean isMemberClass(Class<?> cls)</pre> |
| <div class="block">Returns true if and only if the underlying class is a member class. |
| <p> |
| Note: jdk8u throws a "Malformed class name" error if a given class is a deeply-nested |
| inner class (See SPARK-34607 for details). This issue has already been fixed in jdk9+, so |
| we can remove this helper method safely if we drop the support of jdk8u.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>cls</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="getSimpleName-java.lang.Class-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>getSimpleName</h4> |
| <pre>public static String getSimpleName(Class<?> cls)</pre> |
| <div class="block">Safer than Class obj's getSimpleName which may throw Malformed class name error in scala. |
| This method mimics scalatest's getSimpleNameOfAnObjectsClass.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>cls</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="stripDollars-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>stripDollars</h4> |
| <pre>public static String stripDollars(String s)</pre> |
<div class="block">Remove trailing dollar signs from a qualified class name,
and return the part that follows the last dollar sign remaining in the middle of the name.</div>
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>s</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="stringHalfWidth-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>stringHalfWidth</h4> |
| <pre>public static int stringHalfWidth(String str)</pre> |
| <div class="block">Return the number of half widths in a given string. Note that a full width character |
| occupies two half widths. |
| <p> |
| For a string consisting of 1 million characters, the execution of this method requires |
| about 50ms.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>str</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="sanitizeDirName-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>sanitizeDirName</h4> |
| <pre>public static String sanitizeDirName(String str)</pre> |
| </li> |
| </ul> |
| <a name="isClientMode-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isClientMode</h4> |
| <pre>public static boolean isClientMode(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> conf)</pre> |
| </li> |
| </ul> |
| <a name="isLocalUri-java.lang.String-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isLocalUri</h4> |
| <pre>public static boolean isLocalUri(String uri)</pre> |
| <div class="block">Returns whether the URI is a "local:" URI.</div> |
| </li> |
| </ul> |
| <a name="isFileSplittable-org.apache.hadoop.fs.Path-org.apache.hadoop.io.compress.CompressionCodecFactory-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isFileSplittable</h4> |
| <pre>public static boolean isFileSplittable(org.apache.hadoop.fs.Path path, |
| org.apache.hadoop.io.compress.CompressionCodecFactory codecFactory)</pre> |
| <div class="block">Check whether the file of the path is splittable.</div> |
| </li> |
| </ul> |
| <a name="cloneProperties-java.util.Properties-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>cloneProperties</h4> |
| <pre>public static java.util.Properties cloneProperties(java.util.Properties props)</pre> |
<div class="block">Create a new properties object with the same values as <code>props</code>.</div>
| </li> |
| </ul> |
| <a name="buildLocationMetadata-scala.collection.Seq-int-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>buildLocationMetadata</h4> |
| <pre>public static String buildLocationMetadata(scala.collection.Seq<org.apache.hadoop.fs.Path> paths, |
| int stopAppendingThreshold)</pre> |
| <div class="block">Convert a sequence of <code>Path</code>s to a metadata string. When the length of metadata string |
| exceeds <code>stopAppendingThreshold</code>, stop appending paths for saving memory.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>paths</code> - (undocumented)</dd> |
| <dd><code>stopAppendingThreshold</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="executorOffHeapMemorySizeAsMb-org.apache.spark.SparkConf-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>executorOffHeapMemorySizeAsMb</h4> |
| <pre>public static int executorOffHeapMemorySizeAsMb(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> sparkConf)</pre> |
| <div class="block">Convert MEMORY_OFFHEAP_SIZE to MB Unit, return 0 if MEMORY_OFFHEAP_ENABLED is false.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>sparkConf</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="checkOffHeapEnabled-org.apache.spark.SparkConf-long-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>checkOffHeapEnabled</h4> |
| <pre>public static long checkOffHeapEnabled(<a href="../../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a> sparkConf, |
| long offHeapSize)</pre> |
| <div class="block">return 0 if MEMORY_OFFHEAP_ENABLED is false.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>sparkConf</code> - (undocumented)</dd> |
| <dd><code>offHeapSize</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="createFailedToGetTokenMessage-java.lang.String-java.lang.Throwable-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>createFailedToGetTokenMessage</h4> |
| <pre>public static String createFailedToGetTokenMessage(String serviceName, |
| Throwable e)</pre> |
| <div class="block">Returns a string message about delegation token generation failure</div> |
| </li> |
| </ul> |
| <a name="unzipFilesFromFile-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.io.File-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>unzipFilesFromFile</h4> |
| <pre>public static scala.collection.Seq<java.io.File> unzipFilesFromFile(org.apache.hadoop.fs.FileSystem fs, |
| org.apache.hadoop.fs.Path dfsZipFile, |
| java.io.File localDir)</pre> |
| <div class="block">Decompress a zip file into a local dir. File names are read from the zip file. Note, we skip |
| addressing the directory here. Also, we rely on the caller side to address any exceptions.</div> |
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>fs</code> - (undocumented)</dd> |
| <dd><code>dfsZipFile</code> - (undocumented)</dd> |
| <dd><code>localDir</code> - (undocumented)</dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| <dd>(undocumented)</dd> |
| </dl> |
| </li> |
| </ul> |
| <a name="median-long:A-boolean-"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>median</h4> |
| <pre>public static long median(long[] sizes, |
| boolean alreadySorted)</pre> |
<div class="block">Return the median value of a long array.</div>
| <dl> |
| <dt><span class="paramLabel">Parameters:</span></dt> |
| <dd><code>sizes</code> - </dd> |
| <dd><code>alreadySorted</code> - </dd> |
| <dt><span class="returnLabel">Returns:</span></dt> |
| </dl> |
| </li> |
| </ul> |
| <a name="isG1GC--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>isG1GC</h4> |
| <pre>public static boolean isG1GC()</pre> |
| </li> |
| </ul> |
| <a name="org:Dapache:Dspark:Dinternal:DLogging:D:Dlog_--"> |
| <!-- --> |
| </a> |
| <ul class="blockList"> |
| <li class="blockList"> |
| <h4>org$apache$spark$internal$Logging$$log_</h4> |
| <pre>public static org.slf4j.Logger org$apache$spark$internal$Logging$$log_()</pre> |
| </li> |
| </ul> |
| <a name="org:Dapache:Dspark:Dinternal:DLogging:D:Dlog__:Deq-org.slf4j.Logger-"> |
| <!-- --> |
| </a> |
| <ul class="blockListLast"> |
| <li class="blockList"> |
| <h4>org$apache$spark$internal$Logging$$log__$eq</h4> |
| <pre>public static void org$apache$spark$internal$Logging$$log__$eq(org.slf4j.Logger x$1)</pre> |
| </li> |
| </ul> |
| </li> |
| </ul> |
| </li> |
| </ul> |
| </div> |
| </div> |
| <!-- ========= END OF CLASS DATA ========= --> |
| <!-- ======= START OF BOTTOM NAVBAR ====== --> |
| <div class="bottomNav"><a name="navbar.bottom"> |
| <!-- --> |
| </a> |
| <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> |
| <a name="navbar.bottom.firstrow"> |
| <!-- --> |
| </a> |
| <ul class="navList" title="Navigation"> |
| <li><a href="../../../../overview-summary.html">Overview</a></li> |
| <li><a href="package-summary.html">Package</a></li> |
| <li class="navBarCell1Rev">Class</li> |
| <li><a href="package-tree.html">Tree</a></li> |
| <li><a href="../../../../deprecated-list.html">Deprecated</a></li> |
| <li><a href="../../../../index-all.html">Index</a></li> |
| <li><a href="../../../../help-doc.html">Help</a></li> |
| </ul> |
| </div> |
| <div class="subNav"> |
| <ul class="navList"> |
| <li><a href="../../../../org/apache/spark/util/ThreadUtils.html" title="class in org.apache.spark.util"><span class="typeNameLink">Prev Class</span></a></li> |
| <li><a href="../../../../org/apache/spark/util/VersionUtils.html" title="class in org.apache.spark.util"><span class="typeNameLink">Next Class</span></a></li> |
| </ul> |
| <ul class="navList"> |
| <li><a href="../../../../index.html?org/apache/spark/util/Utils.html" target="_top">Frames</a></li> |
| <li><a href="Utils.html" target="_top">No Frames</a></li> |
| </ul> |
| <ul class="navList" id="allclasses_navbar_bottom"> |
| <li><a href="../../../../allclasses-noframe.html">All Classes</a></li> |
| </ul> |
| <div> |
<script type="text/javascript"><!--
  // Show the "All Classes" link only when this page is loaded as the
  // top-level window; hide it when the page is rendered inside the
  // javadoc frameset (where the frame navigation already provides it).
  allClassesLink = document.getElementById("allclasses_navbar_bottom");
  if(window==top) {
    allClassesLink.style.display = "block";
  }
  else {
    allClassesLink.style.display = "none";
  }
  //-->
</script>
| </div> |
| <div> |
| <ul class="subNavList"> |
| <li>Summary: </li> |
| <li>Nested | </li> |
| <li>Field | </li> |
| <li><a href="#constructor.summary">Constr</a> | </li> |
| <li><a href="#method.summary">Method</a></li> |
| </ul> |
| <ul class="subNavList"> |
| <li>Detail: </li> |
| <li>Field | </li> |
| <li><a href="#constructor.detail">Constr</a> | </li> |
| <li><a href="#method.detail">Method</a></li> |
| </ul> |
| </div> |
| <a name="skip.navbar.bottom"> |
| <!-- --> |
| </a></div> |
| <!-- ======== END OF BOTTOM NAVBAR ======= --> |
| <script defer="defer" type="text/javascript" src="../../../../lib/jquery.js"></script><script defer="defer" type="text/javascript" src="../../../../lib/api-javadocs.js"></script></body> |
| </html> |