<!DOCTYPE html>
<html>
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no" />
<title>Spark 3.5.3 ScalaDoc - org.apache.spark.sql.DataFrameWriter</title>
<meta name="description" content="Spark 3.5.3 ScalaDoc - org.apache.spark.sql.DataFrameWriter" />
<meta name="keywords" content="Spark 3.5.3 ScalaDoc org.apache.spark.sql.DataFrameWriter" />
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<link href="../../../../lib/index.css" media="screen" type="text/css" rel="stylesheet" />
<link href="../../../../lib/template.css" media="screen" type="text/css" rel="stylesheet" />
<link href="../../../../lib/diagrams.css" media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
<script type="text/javascript" src="../../../../lib/jquery.min.js"></script>
<script type="text/javascript" src="../../../../lib/jquery.panzoom.min.js"></script>
<script type="text/javascript" src="../../../../lib/jquery.mousewheel.min.js"></script>
<script type="text/javascript" src="../../../../lib/index.js"></script>
<script type="text/javascript" src="../../../../index.js"></script>
<script type="text/javascript" src="../../../../lib/scheduler.js"></script>
<script type="text/javascript" src="../../../../lib/template.js"></script>
<script type="text/javascript">
/* this variable can be used by the JS to determine the path to the root document */
var toRoot = '../../../../';
</script>
</head>
<body>
<div id="search">
<span id="doc-title">Spark 3.5.3 ScalaDoc<span id="doc-version"></span></span>
<span class="close-results"><span class="left">&lt;</span> Back</span>
<div id="textfilter">
<span class="input">
<input autocapitalize="none" placeholder="Search" id="index-input" type="text" accesskey="/" />
<i class="clear material-icons"></i>
<i id="search-icon" class="material-icons"></i>
</span>
</div>
</div>
<div id="search-results">
<div id="search-progress">
<div id="progress-fill"></div>
</div>
<div id="results-content">
<div id="entity-results"></div>
<div id="member-results"></div>
</div>
</div>
<div id="content-scroll-container" style="-webkit-overflow-scrolling: touch;">
<div id="content-container" style="-webkit-overflow-scrolling: touch;">
<div id="subpackage-spacer">
<div id="packages">
<h1>Packages</h1>
<ul>
<li name="_root_.root" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="_root_"></a><a id="root:_root_"></a>
<span class="permalink">
<a href="../../../../index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="../../../../index.html"><span class="name">root</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../../../../index.html" class="extype" name="_root_">root</a></dd></dl></div>
</li><li name="_root_.org" visbl="pub" class="indented1 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="org"></a><a id="org:org"></a>
<span class="permalink">
<a href="../../../../org/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="../../../index.html"><span class="name">org</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../../../../index.html" class="extype" name="_root_">root</a></dd></dl></div>
</li><li name="org.apache" visbl="pub" class="indented2 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="apache"></a><a id="apache:apache"></a>
<span class="permalink">
<a href="../../../../org/apache/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="../../index.html"><span class="name">apache</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../../../index.html" class="extype" name="org">org</a></dd></dl></div>
</li><li name="org.apache.spark" visbl="pub" class="indented3 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="spark"></a><a id="spark:spark"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="Core Spark functionality." href="../index.html"><span class="name">spark</span></a>
</span>
<p class="shortcomment cmt">Core Spark functionality.</p><div class="fullcomment"><div class="comment cmt"><p>Core Spark functionality. <a href="../SparkContext.html" class="extype" name="org.apache.spark.SparkContext">org.apache.spark.SparkContext</a> serves as the main entry point to
Spark, while <a href="../rdd/RDD.html" class="extype" name="org.apache.spark.rdd.RDD">org.apache.spark.rdd.RDD</a> is the data type representing a distributed collection,
and provides most parallel operations.</p><p>In addition, <a href="../rdd/PairRDDFunctions.html" class="extype" name="org.apache.spark.rdd.PairRDDFunctions">org.apache.spark.rdd.PairRDDFunctions</a> contains operations available only on RDDs
of key-value pairs, such as <code>groupByKey</code> and <code>join</code>; <a href="../rdd/DoubleRDDFunctions.html" class="extype" name="org.apache.spark.rdd.DoubleRDDFunctions">org.apache.spark.rdd.DoubleRDDFunctions</a>
contains operations available only on RDDs of Doubles; and
<a href="../rdd/SequenceFileRDDFunctions.html" class="extype" name="org.apache.spark.rdd.SequenceFileRDDFunctions">org.apache.spark.rdd.SequenceFileRDDFunctions</a> contains operations available on RDDs that can
be saved as SequenceFiles. These operations are automatically available on any RDD of the right
type (e.g. RDD[(Int, Int)]) through implicit conversions.</p><p>Java programmers should reference the <a href="../api/java/index.html" class="extype" name="org.apache.spark.api.java">org.apache.spark.api.java</a> package
for Spark programming APIs in Java.</p><p>Classes and methods marked with <span class="experimental badge" style="float: none;">
Experimental</span> are user-facing features which have not been officially adopted by the
Spark project. These are subject to change or removal in minor releases.</p><p>Classes and methods marked with <span class="developer badge" style="float: none;">
Developer API</span> are intended for advanced users who want to extend Spark through lower-level
interfaces. These are subject to change or removal in minor releases.
</p></div><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../../index.html" class="extype" name="org.apache">apache</a></dd></dl></div>
</li><li name="org.apache.spark.sql" visbl="pub" class="indented4 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="sql"></a><a id="sql:sql"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="Allows the execution of relational queries, including those expressed in SQL using Spark." href="index.html"><span class="name">sql</span></a>
</span>
<p class="shortcomment cmt">Allows the execution of relational queries, including those expressed in SQL using Spark.</p><div class="fullcomment"><div class="comment cmt"><p>Allows the execution of relational queries, including those expressed in SQL using Spark.
</p></div><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../index.html" class="extype" name="org.apache.spark">spark</a></dd></dl></div>
</li><li name="org.apache.spark.sql.api" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="api"></a><a id="api:api"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/api/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="Contains API classes that are specific to a single language (i.e." href="api/index.html"><span class="name">api</span></a>
</span>
<p class="shortcomment cmt">Contains API classes that are specific to a single language (i.e.</p><div class="fullcomment"><div class="comment cmt"><p>Contains API classes that are specific to a single language (i.e. Java).
</p></div><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.avro" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="avro"></a><a id="avro:avro"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/avro/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="avro/index.html"><span class="name">avro</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.catalog" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="catalog"></a><a id="catalog:catalog"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/catalog/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="catalog/index.html"><span class="name">catalog</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.columnar" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="columnar"></a><a id="columnar:columnar"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/columnar/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="columnar/index.html"><span class="name">columnar</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.connector" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="connector"></a><a id="connector:connector"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/connector/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="connector/index.html"><span class="name">connector</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.expressions" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="expressions"></a><a id="expressions:expressions"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/expressions/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="expressions/index.html"><span class="name">expressions</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.jdbc" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="jdbc"></a><a id="jdbc:jdbc"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/jdbc/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="jdbc/index.html"><span class="name">jdbc</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.sources" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="sources"></a><a id="sources:sources"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/sources/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="A set of APIs for adding data sources to Spark SQL." href="sources/index.html"><span class="name">sources</span></a>
</span>
<p class="shortcomment cmt">A set of APIs for adding data sources to Spark SQL.</p><div class="fullcomment"><div class="comment cmt"><p>A set of APIs for adding data sources to Spark SQL.
</p></div><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.streaming" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="streaming"></a><a id="streaming:streaming"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/streaming/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="streaming/index.html"><span class="name">streaming</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.types" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="types"></a><a id="types:types"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/types/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="Contains a type system for attributes produced by relations, including complex types like structs, arrays and maps." href="types/index.html"><span class="name">types</span></a>
</span>
<p class="shortcomment cmt">Contains a type system for attributes produced by relations, including complex types like
structs, arrays and maps.</p><div class="fullcomment"><div class="comment cmt"><p>Contains a type system for attributes produced by relations, including complex types like
structs, arrays and maps.
</p></div><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.util" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="util"></a><a id="util:util"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/util/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="util/index.html"><span class="name">util</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li name="org.apache.spark.sql.vectorized" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="vectorized"></a><a id="vectorized:vectorized"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/vectorized/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="vectorized/index.html"><span class="name">vectorized</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></dd></dl></div>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="AnalysisException.html" title="Thrown when a query fails to analyze, usually because the query itself is invalid."></a>
<a href="AnalysisException.html" title="Thrown when a query fails to analyze, usually because the query itself is invalid.">AnalysisException</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="Column.html" title="A column that will be computed based on the data in a DataFrame."></a>
<a href="Column.html" title="A column that will be computed based on the data in a DataFrame.">Column</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="ColumnName.html" title="A convenient class used for constructing schema."></a>
<a href="ColumnName.html" title="A convenient class used for constructing schema.">ColumnName</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="trait" href="CreateTableWriter.html" title="Trait to restrict calls to create and replace operations."></a>
<a href="CreateTableWriter.html" title="Trait to restrict calls to create and replace operations.">CreateTableWriter</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="DataFrameNaFunctions.html" title="Functionality for working with missing data in DataFrames."></a>
<a href="DataFrameNaFunctions.html" title="Functionality for working with missing data in DataFrames.">DataFrameNaFunctions</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="DataFrameReader.html" title="Interface used to load a Dataset from external storage systems (e.g."></a>
<a href="DataFrameReader.html" title="Interface used to load a Dataset from external storage systems (e.g.">DataFrameReader</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="DataFrameStatFunctions.html" title="Statistic functions for DataFrames."></a>
<a href="DataFrameStatFunctions.html" title="Statistic functions for DataFrames.">DataFrameStatFunctions</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="" title="Interface used to write a Dataset to external storage systems (e.g."></a>
<a href="" title="Interface used to write a Dataset to external storage systems (e.g.">DataFrameWriter</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="DataFrameWriterV2.html" title="Interface used to write a org.apache.spark.sql.Dataset to external storage using the v2 API."></a>
<a href="DataFrameWriterV2.html" title="Interface used to write a org.apache.spark.sql.Dataset to external storage using the v2 API.">DataFrameWriterV2</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="Dataset.html" title="A Dataset is a strongly typed collection of domain-specific objects that can be transformed in parallel using functional or relational operations."></a>
<a href="Dataset.html" title="A Dataset is a strongly typed collection of domain-specific objects that can be transformed in parallel using functional or relational operations.">Dataset</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="DatasetHolder.html" title="A container for a Dataset, used for implicit conversions in Scala."></a>
<a href="DatasetHolder.html" title="A container for a Dataset, used for implicit conversions in Scala.">DatasetHolder</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="trait" href="Encoder.html" title="Used to convert a JVM object of type T to and from the internal Spark SQL representation."></a>
<a href="Encoder.html" title="Used to convert a JVM object of type T to and from the internal Spark SQL representation.">Encoder</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="object" href="Encoders$.html" title="Methods for creating an Encoder."></a>
<a href="Encoders$.html" title="Methods for creating an Encoder.">Encoders</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="ExperimentalMethods.html" title=":: Experimental :: Holder for experimental methods for the bravest."></a>
<a href="ExperimentalMethods.html" title=":: Experimental :: Holder for experimental methods for the bravest.">ExperimentalMethods</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="ForeachWriter.html" title="The abstract class for writing custom logic to process data generated by a query."></a>
<a href="ForeachWriter.html" title="The abstract class for writing custom logic to process data generated by a query.">ForeachWriter</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="KeyValueGroupedDataset.html" title="A Dataset has been logically grouped by a user specified grouping key."></a>
<a href="KeyValueGroupedDataset.html" title="A Dataset has been logically grouped by a user specified grouping key.">KeyValueGroupedDataset</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="trait" href="LowPrioritySQLImplicits.html" title="Lower priority implicit methods for converting Scala objects into Datasets."></a>
<a href="LowPrioritySQLImplicits.html" title="Lower priority implicit methods for converting Scala objects into Datasets.">LowPrioritySQLImplicits</a>
</li><li class="current-entities indented4">
<a class="object" href="Observation$.html" title="(Scala-specific) Create instances of Observation via Scala apply."></a>
<a class="class" href="Observation.html" title="Helper class to simplify usage of Dataset.observe(String, Column, Column*):"></a>
<a href="Observation.html" title="Helper class to simplify usage of Dataset.observe(String, Column, Column*):">Observation</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="RelationalGroupedDataset.html" title="A set of methods for aggregations on a DataFrame, created by groupBy, cube or rollup (and also pivot)."></a>
<a href="RelationalGroupedDataset.html" title="A set of methods for aggregations on a DataFrame, created by groupBy, cube or rollup (and also pivot).">RelationalGroupedDataset</a>
</li><li class="current-entities indented4">
<a class="object" href="Row$.html" title=""></a>
<a class="trait" href="Row.html" title="Represents one row of output from a relational operator."></a>
<a href="Row.html" title="Represents one row of output from a relational operator.">Row</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="RowFactory.html" title=""></a>
<a href="RowFactory.html" title="">RowFactory</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="RuntimeConfig.html" title="Runtime configuration interface for Spark."></a>
<a href="RuntimeConfig.html" title="Runtime configuration interface for Spark.">RuntimeConfig</a>
</li><li class="current-entities indented4">
<a class="object" href="SQLContext$.html" title="This SQLContext object contains utility functions to create a singleton SQLContext instance, or to get the created SQLContext instance."></a>
<a class="class" href="SQLContext.html" title="The entry point for working with structured data (rows and columns) in Spark 1.x."></a>
<a href="SQLContext.html" title="The entry point for working with structured data (rows and columns) in Spark 1.x.">SQLContext</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="SQLImplicits.html" title="A collection of implicit methods for converting common Scala objects into Datasets."></a>
<a href="SQLImplicits.html" title="A collection of implicit methods for converting common Scala objects into Datasets.">SQLImplicits</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="SaveMode.html" title=""></a>
<a href="SaveMode.html" title="">SaveMode</a>
</li><li class="current-entities indented4">
<a class="object" href="SparkSession$.html" title=""></a>
<a class="class" href="SparkSession.html" title="The entry point to programming Spark with the Dataset and DataFrame API."></a>
<a href="SparkSession.html" title="The entry point to programming Spark with the Dataset and DataFrame API.">SparkSession</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="SparkSessionExtensions.html" title=":: Experimental :: Holder for injection points to the SparkSession."></a>
<a href="SparkSessionExtensions.html" title=":: Experimental :: Holder for injection points to the SparkSession.">SparkSessionExtensions</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="trait" href="SparkSessionExtensionsProvider.html" title=":: Unstable ::"></a>
<a href="SparkSessionExtensionsProvider.html" title=":: Unstable ::">SparkSessionExtensionsProvider</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="TypedColumn.html" title="A Column where an Encoder has been given for the expected input and return type."></a>
<a href="TypedColumn.html" title="A Column where an Encoder has been given for the expected input and return type.">TypedColumn</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="UDFRegistration.html" title="Functions for registering user-defined functions."></a>
<a href="UDFRegistration.html" title="Functions for registering user-defined functions.">UDFRegistration</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="class" href="UDTFRegistration.html" title="Functions for registering user-defined table functions."></a>
<a href="UDTFRegistration.html" title="Functions for registering user-defined table functions.">UDTFRegistration</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="trait" href="WriteConfigMethods.html" title="Configuration methods common to create/replace operations and insert/overwrite operations."></a>
<a href="WriteConfigMethods.html" title="Configuration methods common to create/replace operations and insert/overwrite operations.">WriteConfigMethods</a>
</li><li class="current-entities indented4">
<span class="separator"></span>
<a class="object" href="functions$.html" title="Commonly used functions available for DataFrame operations."></a>
<a href="functions$.html" title="Commonly used functions available for DataFrame operations.">functions</a>
</li>
</ul>
</div>
</div>
<div id="content">
<body class="class type">
<div id="definition">
<div class="big-circle class">c</div>
<p id="owner"><a href="../../../index.html" class="extype" name="org">org</a>.<a href="../../index.html" class="extype" name="org.apache">apache</a>.<a href="../index.html" class="extype" name="org.apache.spark">spark</a>.<a href="index.html" class="extype" name="org.apache.spark.sql">sql</a></p>
<h1>DataFrameWriter<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span></h1>
<h3><span class="morelinks"></span></h3>
</div>
<h4 id="signature" class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">class</span>
</span>
<span class="symbol">
<span class="name">DataFrameWriter</span><span class="tparams">[<span name="T">T</span>]</span><span class="result"> extends <a href="../../../../scala/index.html#AnyRef=Object" class="extmbr" name="scala.AnyRef">AnyRef</a></span>
</span>
</h4>
<div id="comment" class="fullcommenttop"><div class="comment cmt"><p>Interface used to write a <a href="Dataset.html" class="extype" name="org.apache.spark.sql.Dataset">Dataset</a> to external storage systems (e.g. file systems,
key-value stores, etc). Use <code>Dataset.write</code> to access this.
</p></div><dl class="attributes block"> <dt>Annotations</dt><dd>
<span class="name">@Stable</span><span class="args">()</span>
</dd><dt>Source</dt><dd><a href="https://github.com/apache/spark/tree/v3.5.3/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala" target="_blank">DataFrameWriter.scala</a></dd><dt>Since</dt><dd><p>1.4.0</p></dd></dl><div class="toggleContainer block">
<span class="toggle">
Linear Supertypes
</span>
<div class="superTypes hiddenContent"><a href="../../../../scala/index.html#AnyRef=Object" class="extmbr" name="scala.AnyRef">AnyRef</a>, <span class="extype" name="scala.Any">Any</span></div>
</div></div>
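          <div class="comment cmt"><p>A minimal usage sketch: the <code>SparkSession</code> is assumed to be available as <code>spark</code>, and the output path is hypothetical.</p><pre>// Obtain a DataFrameWriter from a Dataset via `write`, chain configuration
// calls, and finish with a terminal action such as `save`.
val df = spark.range(10).toDF("id")   // `spark`: an existing SparkSession
df.write                              // returns a DataFrameWriter[Row]
  .format("parquet")                  // pick the output data source
  .mode("overwrite")                  // what to do if the target already exists
  .save("/tmp/example-output")        // hypothetical output path</pre></div>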
<div id="mbrsel">
<div class="toggle"></div>
<div id="memberfilter">
<i class="material-icons arrow"></i>
<span class="input">
<input id="mbrsel-input" placeholder="Filter all members" type="text" accesskey="/" />
</span>
<i class="clear material-icons"></i>
</div>
<div id="filterby">
<div id="order">
<span class="filtertype">Ordering</span>
<ol>
<li class="alpha in"><span>Alphabetic</span></li>
<li class="inherit out"><span>By Inheritance</span></li>
</ol>
</div>
<div class="ancestors">
<span class="filtertype">Inherited<br />
</span>
<ol id="linearization">
<li class="in" name="org.apache.spark.sql.DataFrameWriter"><span>DataFrameWriter</span></li><li class="in" name="scala.AnyRef"><span>AnyRef</span></li><li class="in" name="scala.Any"><span>Any</span></li>
</ol>
</div><div class="ancestors">
<span class="filtertype"></span>
<ol>
<li class="hideall out"><span>Hide All</span></li>
<li class="showall in"><span>Show All</span></li>
</ol>
</div>
<div id="visbl">
<span class="filtertype">Visibility</span>
<ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
</div>
</div>
</div>
<div id="template">
<div id="allMembers">
<div class="values members">
<h3>Value Members</h3>
<ol>
<li name="scala.AnyRef#!=" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="!=(x$1:Any):Boolean"></a><a id="!=(Any):Boolean"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#!=(x$1:Any):Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $bang$eq" class="name">!=</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef###" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="##():Int"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html###():Int" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $hash$hash" class="name">##</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#==" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="==(x$1:Any):Boolean"></a><a id="==(Any):Boolean"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#==(x$1:Any):Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $eq$eq" class="name">==</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.Any#asInstanceOf" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="asInstanceOf[T0]:T0"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#asInstanceOf[T0]:T0" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">asInstanceOf</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="result">: <span class="extype" name="scala.Any.asInstanceOf.T0">T0</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#bucketBy" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="bucketBy(numBuckets:Int,colName:String,colNames:String*):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="bucketBy(Int,String,String*):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#bucketBy(numBuckets:Int,colName:String,colNames:String*):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">bucketBy</span><span class="params">(<span name="numBuckets">numBuckets: <span class="extype" name="scala.Int">Int</span></span>, <span name="colName">colName: <span class="extype" name="scala.Predef.String">String</span></span>, <span name="colNames">colNames: <span class="extype" name="scala.Predef.String">String</span>*</span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Buckets the output by the given columns.</p><div class="fullcomment"><div class="comment cmt"><p>Buckets the output by the given columns. If specified, the output is laid out on the file
system similar to Hive's bucketing scheme, but with a different bucket hash function
and is not compatible with Hive's bucketing.</p><p>This is applicable for all file-based data sources (e.g. Parquet, JSON) starting with Spark
2.1.0.
</p></div><dl class="attributes block"> <dt>Annotations</dt><dd>
<span class="name">@varargs</span><span class="args">()</span>
</dd><dt>Since</dt><dd><p>2.0</p></dd></dl></div>
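      <div class="comment cmt"><p>A short sketch of bucketed output, assuming <code>df</code> is an existing <code>DataFrame</code>; the column and table names are illustrative. Bucketing is intended for persistent tables written with <code>saveAsTable</code>.</p><pre>// Bucket rows by "id" into 4 buckets and sort within each bucket;
// the bucketing metadata is recorded in the session catalog.
df.write
  .bucketBy(4, "id")
  .sortBy("id")
  .saveAsTable("bucketed_table")   // illustrative table name</pre></div>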
</li><li name="scala.AnyRef#clone" visbl="prt" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="clone():Object"></a><a id="clone():AnyRef"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#clone():Object" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">clone</span><span class="params">()</span><span class="result">: <a href="../../../../scala/index.html#AnyRef=Object" class="extmbr" name="scala.AnyRef">AnyRef</a></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[<span class="extype" name="java.lang">lang</span>] </dd><dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.CloneNotSupportedException]">...</span>
</span>)</span>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#csv" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="csv(path:String):Unit"></a><a id="csv(String):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#csv(path:String):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">csv</span><span class="params">(<span name="path">path: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<p class="shortcomment cmt">Saves the content of the <code>DataFrame</code> in CSV format at the specified path.</p><div class="fullcomment"><div class="comment cmt"><p>Saves the content of the <code>DataFrame</code> in CSV format at the specified path.
This is equivalent to:</p><pre>format(<span class="lit">"csv"</span>).save(path)</pre><p>You can find the CSV-specific options for writing CSV files in
<a href="https://spark.apache.org/docs/latest/sql-data-sources-csv.html#data-source-option">
Data Source Option</a> in the version you use.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>2.0.0</p></dd></dl></div>
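      <div class="comment cmt"><p>An illustrative call, assuming <code>df</code> is an existing <code>DataFrame</code>; the option and path below are assumptions.</p><pre>// Write df as CSV with a header row; equivalent to
// format("csv").option("header", "true").save(path).
df.write
  .option("header", "true")
  .csv("/tmp/example-csv")   // hypothetical output path</pre></div>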
</li><li name="scala.AnyRef#eq" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="eq(x$1:AnyRef):Boolean"></a><a id="eq(AnyRef):Boolean"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#eq(x$1:AnyRef):Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">eq</span><span class="params">(<span name="arg0">arg0: <a href="../../../../scala/index.html#AnyRef=Object" class="extmbr" name="scala.AnyRef">AnyRef</a></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#equals" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="equals(x$1:Any):Boolean"></a><a id="equals(Any):Boolean"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#equals(x$1:Any):Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">equals</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#finalize" visbl="prt" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="finalize():Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#finalize():Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">finalize</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[<span class="extype" name="java.lang">lang</span>] </dd><dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="symbol">classOf[java.lang.Throwable]</span>
</span>)</span>
</dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#format" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="format(source:String):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="format(String):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#format(source:String):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">format</span><span class="params">(<span name="source">source: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Specifies the underlying output data source.</p><div class="fullcomment"><div class="comment cmt"><p>Specifies the underlying output data source. Built-in options include &quot;parquet&quot;, &quot;json&quot;, etc.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
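      <div class="comment cmt"><p>A brief sketch, assuming <code>df</code> is an existing <code>DataFrame</code>; <code>&quot;orc&quot;</code> is one of the built-in source names and the path is hypothetical.</p><pre>// Select the output data source explicitly, then save.
df.write.format("orc").save("/tmp/example-orc")</pre></div>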
</li><li name="scala.AnyRef#getClass" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="getClass():Class[_]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#getClass():Class[_]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">getClass</span><span class="params">()</span><span class="result">: <span class="extype" name="java.lang.Class">Class</span>[_]</span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd><dt>Annotations</dt><dd>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#hashCode" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="hashCode():Int"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#hashCode():Int" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">hashCode</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd><dt>Annotations</dt><dd>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#insertInto" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="insertInto(tableName:String):Unit"></a><a id="insertInto(String):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#insertInto(tableName:String):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">insertInto</span><span class="params">(<span name="tableName">tableName: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<p class="shortcomment cmt">Inserts the content of the <code>DataFrame</code> to the specified table.</p><div class="fullcomment"><div class="comment cmt"><p>Inserts the content of the <code>DataFrame</code> to the specified table. It requires that
the schema of the <code>DataFrame</code> is the same as the schema of the table.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd><dt>Note</dt><dd><span class="cmt"><p>Unlike <code>saveAsTable</code>, <code>insertInto</code> ignores the column names and just uses position-based
resolution. For example:</p></span>, <span class="cmt"><p>SaveMode.ErrorIfExists and SaveMode.Ignore behave as SaveMode.Append in <code>insertInto</code> as
<code>insertInto</code> is not a table creating operation.</p><pre>scala&gt; <span class="std">Seq</span>((<span class="num">1</span>, <span class="num">2</span>)).toDF(<span class="lit">"i"</span>, <span class="lit">"j"</span>).write.mode(<span class="lit">"overwrite"</span>).saveAsTable(<span class="lit">"t1"</span>)
scala&gt; <span class="std">Seq</span>((<span class="num">3</span>, <span class="num">4</span>)).toDF(<span class="lit">"j"</span>, <span class="lit">"i"</span>).write.insertInto(<span class="lit">"t1"</span>)
scala&gt; <span class="std">Seq</span>((<span class="num">5</span>, <span class="num">6</span>)).toDF(<span class="lit">"a"</span>, <span class="lit">"b"</span>).write.insertInto(<span class="lit">"t1"</span>)
scala&gt; sql(<span class="lit">"select * from t1"</span>).show
+---+---+
| i| j|
+---+---+
| <span class="num">5</span>| <span class="num">6</span>|
| <span class="num">3</span>| <span class="num">4</span>|
| <span class="num">1</span>| <span class="num">2</span>|
+---+---+</pre><p>Because it inserts data into an existing table, the format and options will be ignored.</p></span></dd></dl></div>
</li><li name="scala.Any#isInstanceOf" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="isInstanceOf[T0]:Boolean"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#isInstanceOf[T0]:Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">isInstanceOf</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#jdbc" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="jdbc(url:String,table:String,connectionProperties:java.util.Properties):Unit"></a><a id="jdbc(String,String,Properties):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#jdbc(url:String,table:String,connectionProperties:java.util.Properties):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">jdbc</span><span class="params">(<span name="url">url: <span class="extype" name="scala.Predef.String">String</span></span>, <span name="table">table: <span class="extype" name="scala.Predef.String">String</span></span>, <span name="connectionProperties">connectionProperties: <span class="extype" name="java.util.Properties">Properties</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<p class="shortcomment cmt">Saves the content of the <code>DataFrame</code> to an external database table via JDBC.</p><div class="fullcomment"><div class="comment cmt"><p>Saves the content of the <code>DataFrame</code> to an external database table via JDBC. In the case the
table already exists in the external database, behavior of this function depends on the
save mode, specified by the <code>mode</code> function (default to throwing an exception).</p><p>Don't create too many partitions in parallel on a large cluster; otherwise Spark might crash
your external database systems.</p><p>JDBC-specific option and parameter documentation for storing tables via JDBC in
<a href="https://spark.apache.org/docs/latest/sql-data-sources-jdbc.html#data-source-option">
Data Source Option</a> in the version you use.
</p></div><dl class="paramcmts block"><dt class="param">table</dt><dd class="cmt"><p>Name of the table in the external database.</p></dd><dt class="param">connectionProperties</dt><dd class="cmt"><p>JDBC database connection arguments, a list of arbitrary string
tag/value. Normally at least a &quot;user&quot; and &quot;password&quot; property
should be included. &quot;batchsize&quot; can be used to control the
number of rows per insert. &quot;isolationLevel&quot; can be one of
&quot;NONE&quot;, &quot;READ_COMMITTED&quot;, &quot;READ_UNCOMMITTED&quot;, &quot;REPEATABLE_READ&quot;,
or &quot;SERIALIZABLE&quot;, corresponding to standard transaction
isolation levels defined by JDBC's Connection object, with default
of &quot;READ_UNCOMMITTED&quot;.</p></dd></dl><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
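      <div class="comment cmt"><p>A sketch of a JDBC write, assuming <code>df</code> is an existing <code>DataFrame</code>; the URL, table name, and credentials are placeholders.</p><pre>import java.util.Properties

val props = new Properties()
props.setProperty("user", "username")       // placeholder credentials
props.setProperty("password", "password")

df.write
  .mode("append")
  .jdbc("jdbc:postgresql://host:5432/db", "schema.table", props)   // placeholder URL and table</pre></div>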
</li><li name="org.apache.spark.sql.DataFrameWriter#json" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="json(path:String):Unit"></a><a id="json(String):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#json(path:String):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">json</span><span class="params">(<span name="path">path: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<p class="shortcomment cmt">Saves the content of the <code>DataFrame</code> in JSON format (<a href="http://jsonlines.org/">
JSON Lines text format or newline-delimited JSON</a>) at the specified path.</p><div class="fullcomment"><div class="comment cmt"><p>Saves the content of the <code>DataFrame</code> in JSON format (<a href="http://jsonlines.org/">
JSON Lines text format or newline-delimited JSON</a>) at the specified path.
This is equivalent to:</p><pre>format(<span class="lit">"json"</span>).save(path)</pre><p>You can find the JSON-specific options for writing JSON files in
<a href="https://spark.apache.org/docs/latest/sql-data-sources-json.html#data-source-option">
Data Source Option</a> in the version you use.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
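      <div class="comment cmt"><p>An illustrative call, assuming <code>df</code> is an existing <code>DataFrame</code>; the path is hypothetical.</p><pre>// Write df as JSON Lines; equivalent to format("json").save(path).
df.write.json("/tmp/example-json")   // hypothetical output path</pre></div>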
</li><li name="org.apache.spark.sql.DataFrameWriter#mode" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="mode(saveMode:String):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="mode(String):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#mode(saveMode:String):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">mode</span><span class="params">(<span name="saveMode">saveMode: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Specifies the behavior when data or table already exists.</p><div class="fullcomment"><div class="comment cmt"><p>Specifies the behavior when data or table already exists. Options include:</p><ul><li><code>overwrite</code>: overwrite the existing data.</li><li><code>append</code>: append the data.</li><li><code>ignore</code>: ignore the operation (i.e. no-op).</li><li><code>error</code> or <code>errorifexists</code>: default option, throw an exception at runtime.</li></ul></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#mode" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="mode(saveMode:org.apache.spark.sql.SaveMode):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="mode(SaveMode):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#mode(saveMode:org.apache.spark.sql.SaveMode):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">mode</span><span class="params">(<span name="saveMode">saveMode: <a href="SaveMode.html" class="extype" name="org.apache.spark.sql.SaveMode">SaveMode</a></span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Specifies the behavior when data or table already exists.</p><div class="fullcomment"><div class="comment cmt"><p>Specifies the behavior when data or table already exists. Options include:</p><ul><li><code>SaveMode.Overwrite</code>: overwrite the existing data.</li><li><code>SaveMode.Append</code>: append the data.</li><li><code>SaveMode.Ignore</code>: ignore the operation (i.e. no-op).</li><li><code>SaveMode.ErrorIfExists</code>: throw an exception at runtime.</li></ul><p>The default option is <code>ErrorIfExists</code>.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
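      <div class="comment cmt"><p>An equivalent way to set the save mode via the <code>SaveMode</code> enum, assuming <code>df</code> is an existing <code>DataFrame</code>; the path is hypothetical.</p><pre>import org.apache.spark.sql.SaveMode

// Skip the write entirely if data already exists at the target path.
df.write.mode(SaveMode.Ignore).parquet("/tmp/example-parquet")</pre></div>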
</li><li name="scala.AnyRef#ne" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="ne(x$1:AnyRef):Boolean"></a><a id="ne(AnyRef):Boolean"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#ne(x$1:AnyRef):Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">ne</span><span class="params">(<span name="arg0">arg0: <a href="../../../../scala/index.html#AnyRef=Object" class="extmbr" name="scala.AnyRef">AnyRef</a></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#notify" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="notify():Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#notify():Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">notify</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#notifyAll" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="notifyAll():Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#notifyAll():Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">notifyAll</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#option" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="option(key:String,value:Double):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="option(String,Double):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#option(key:String,value:Double):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">option</span><span class="params">(<span name="key">key: <span class="extype" name="scala.Predef.String">String</span></span>, <span name="value">value: <span class="extype" name="scala.Double">Double</span></span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Adds an output option for the underlying data source.</p><div class="fullcomment"><div class="comment cmt"><p>Adds an output option for the underlying data source.</p><p>All options are maintained in a case-insensitive way in terms of key names.
If a new option has the same key case-insensitively, it will override the existing option.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>2.0.0</p></dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#option" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="option(key:String,value:Long):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="option(String,Long):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#option(key:String,value:Long):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">option</span><span class="params">(<span name="key">key: <span class="extype" name="scala.Predef.String">String</span></span>, <span name="value">value: <span class="extype" name="scala.Long">Long</span></span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Adds an output option for the underlying data source.</p><div class="fullcomment"><div class="comment cmt"><p>Adds an output option for the underlying data source.</p><p>All options are maintained in a case-insensitive way in terms of key names.
If a new option has the same key case-insensitively, it will override the existing option.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>2.0.0</p></dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#option" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="option(key:String,value:Boolean):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="option(String,Boolean):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#option(key:String,value:Boolean):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">option</span><span class="params">(<span name="key">key: <span class="extype" name="scala.Predef.String">String</span></span>, <span name="value">value: <span class="extype" name="scala.Boolean">Boolean</span></span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Adds an output option for the underlying data source.</p><div class="fullcomment"><div class="comment cmt"><p>Adds an output option for the underlying data source.</p><p>All options are maintained in a case-insensitive way in terms of key names.
If a new option has the same key case-insensitively, it will override the existing option.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>2.0.0</p></dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#option" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="option(key:String,value:String):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="option(String,String):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#option(key:String,value:String):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">option</span><span class="params">(<span name="key">key: <span class="extype" name="scala.Predef.String">String</span></span>, <span name="value">value: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Adds an output option for the underlying data source.</p><div class="fullcomment"><div class="comment cmt"><p>Adds an output option for the underlying data source.</p><p>All options are maintained in a case-insensitive way in terms of key names.
If a new option has the same key case-insensitively, it will override the existing option.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
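<div class="comment cmt"><p>For illustration, a sketch combining three of the typed <code>option</code> overloads on one writer (assuming an existing <code>DataFrame</code> named <code>df</code>; the option names are illustrative CSV and file-sink settings):</p><pre>df.write
  .format(<span class="lit">"csv"</span>)
  .option(<span class="lit">"header"</span>, true)  <span class="cmt">// Boolean overload</span>
  .option(<span class="lit">"sep"</span>, <span class="lit">"|"</span>)  <span class="cmt">// String overload</span>
  .option(<span class="lit">"maxRecordsPerFile"</span>, <span class="num">10000L</span>)  <span class="cmt">// Long overload</span>
  .save(<span class="lit">"/tmp/events_csv"</span>)</pre></div>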
</li><li name="org.apache.spark.sql.DataFrameWriter#options" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="options(options:java.util.Map[String,String]):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="options(Map[String,String]):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#options(options:java.util.Map[String,String]):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">options</span><span class="params">(<span name="options">options: <span class="extype" name="java.util.Map">Map</span>[<span class="extype" name="scala.Predef.String">String</span>, <span class="extype" name="scala.Predef.String">String</span>]</span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Adds output options for the underlying data source.</p><div class="fullcomment"><div class="comment cmt"><p>Adds output options for the underlying data source.</p><p>All options are maintained in a case-insensitive way in terms of key names.
If a new option has the same key case-insensitively, it will override the existing option.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#options" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="options(options:scala.collection.Map[String,String]):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="options(Map[String,String]):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#options(options:scala.collection.Map[String,String]):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">options</span><span class="params">(<span name="options">options: <span class="extype" name="scala.collection.Map">Map</span>[<span class="extype" name="scala.Predef.String">String</span>, <span class="extype" name="scala.Predef.String">String</span>]</span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">(Scala-specific) Adds output options for the underlying data source.</p><div class="fullcomment"><div class="comment cmt"><p>(Scala-specific) Adds output options for the underlying data source.</p><p>All options are maintained in a case-insensitive way in terms of key names.
If a new option has the same key case-insensitively, it will override the existing option.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
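<div class="comment cmt"><p>For illustration, a sketch of the Scala-specific <code>options</code> overload, passing several settings at once (assuming an existing <code>DataFrame</code> named <code>df</code>; the option names are illustrative CSV write settings):</p><pre>val csvOptions = <span class="std">Map</span>(<span class="lit">"header"</span> -&gt; <span class="lit">"true"</span>, <span class="lit">"sep"</span> -&gt; <span class="lit">"|"</span>)

df.write
  .format(<span class="lit">"csv"</span>)
  .options(csvOptions)
  .save(<span class="lit">"/tmp/events_csv"</span>)</pre></div>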
</li><li name="org.apache.spark.sql.DataFrameWriter#orc" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="orc(path:String):Unit"></a><a id="orc(String):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#orc(path:String):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">orc</span><span class="params">(<span name="path">path: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<p class="shortcomment cmt">Saves the content of the <code>DataFrame</code> in ORC format at the specified path.</p><div class="fullcomment"><div class="comment cmt"><p>Saves the content of the <code>DataFrame</code> in ORC format at the specified path.
This is equivalent to:</p><pre>format(<span class="lit">"orc"</span>).save(path)</pre><p>ORC-specific option(s) for writing ORC files can be found in
<a href=
"https://spark.apache.org/docs/latest/sql-data-sources-orc.html#data-source-option">
Data Source Option</a> in the version you use.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.5.0</p></dd></dl></div>
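<div class="comment cmt"><p>For illustration, a sketch of an ORC write with a compression codec option (assuming an existing <code>DataFrame</code> named <code>df</code> and a hypothetical output path; <code>compression</code> is one of the ORC write options listed on the linked Data Source Option page):</p><pre>df.write
  .option(<span class="lit">"compression"</span>, <span class="lit">"snappy"</span>)
  .orc(<span class="lit">"/tmp/events_orc"</span>)</pre></div>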
</li><li name="org.apache.spark.sql.DataFrameWriter#parquet" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="parquet(path:String):Unit"></a><a id="parquet(String):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#parquet(path:String):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">parquet</span><span class="params">(<span name="path">path: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<p class="shortcomment cmt">Saves the content of the <code>DataFrame</code> in Parquet format at the specified path.</p><div class="fullcomment"><div class="comment cmt"><p>Saves the content of the <code>DataFrame</code> in Parquet format at the specified path.
This is equivalent to:</p><pre>format(<span class="lit">"parquet"</span>).save(path)</pre><p>Parquet-specific option(s) for writing Parquet files can be found in
<a href=
"https://spark.apache.org/docs/latest/sql-data-sources-parquet.html#data-source-option">
Data Source Option</a> in the version you use.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
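<div class="comment cmt"><p>For illustration, the equivalent Parquet write with a compression codec option (assuming an existing <code>DataFrame</code> named <code>df</code> and a hypothetical output path; <code>compression</code> is one of the Parquet write options listed on the linked Data Source Option page):</p><pre>df.write
  .option(<span class="lit">"compression"</span>, <span class="lit">"snappy"</span>)
  .parquet(<span class="lit">"/tmp/events_parquet"</span>)</pre></div>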
</li><li name="org.apache.spark.sql.DataFrameWriter#partitionBy" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="partitionBy(colNames:String*):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="partitionBy(String*):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#partitionBy(colNames:String*):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">partitionBy</span><span class="params">(<span name="colNames">colNames: <span class="extype" name="scala.Predef.String">String</span>*</span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Partitions the output by the given columns on the file system.</p><div class="fullcomment"><div class="comment cmt"><p>Partitions the output by the given columns on the file system. If specified, the output is
laid out on the file system similar to Hive's partitioning scheme. As an example, when we
partition a dataset by year and then month, the directory layout would look like:</p><ul><li>year=2016/month=01/</li><li>year=2016/month=02/</li></ul><p>Partitioning is one of the most widely used techniques to optimize physical data layout.
It provides a coarse-grained index for skipping unnecessary data reads when queries have
predicates on the partitioned columns. In order for partitioning to work well, the number
of distinct values in each column should typically be less than tens of thousands.</p><p>This is applicable for all file-based data sources (e.g. Parquet, JSON) starting with Spark
2.1.0.
</p></div><dl class="attributes block"> <dt>Annotations</dt><dd>
<span class="name">@varargs</span><span class="args">()</span>
</dd><dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
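<div class="comment cmt"><p>For illustration, a sketch producing the <code>year=.../month=...</code> layout described above (assuming an existing <code>DataFrame</code> named <code>df</code> that has <code>year</code> and <code>month</code> columns, and a hypothetical output path):</p><pre>df.write
  .partitionBy(<span class="lit">"year"</span>, <span class="lit">"month"</span>)
  .parquet(<span class="lit">"/tmp/events_partitioned"</span>)</pre></div>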
</li><li name="org.apache.spark.sql.DataFrameWriter#save" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="save():Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#save():Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">save</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<p class="shortcomment cmt">Saves the content of the <code>DataFrame</code> as the specified table.</p><div class="fullcomment"><div class="comment cmt"><p>Saves the content of the <code>DataFrame</code> as the specified table.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
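<div class="comment cmt"><p>For illustration, a sketch of the path-less <code>save()</code>, where the target is described entirely by the format and options (assuming an existing <code>DataFrame</code> named <code>df</code>; the JDBC connection settings are placeholders):</p><pre>df.write
  .format(<span class="lit">"jdbc"</span>)
  .option(<span class="lit">"url"</span>, <span class="lit">"jdbc:postgresql://localhost/testdb"</span>)
  .option(<span class="lit">"dbtable"</span>, <span class="lit">"public.events"</span>)
  .option(<span class="lit">"user"</span>, <span class="lit">"test"</span>)
  .mode(<span class="lit">"append"</span>)
  .save()</pre></div>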
</li><li name="org.apache.spark.sql.DataFrameWriter#save" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="save(path:String):Unit"></a><a id="save(String):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#save(path:String):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">save</span><span class="params">(<span name="path">path: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<p class="shortcomment cmt">Saves the content of the <code>DataFrame</code> at the specified path.</p><div class="fullcomment"><div class="comment cmt"><p>Saves the content of the <code>DataFrame</code> at the specified path.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
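<div class="comment cmt"><p>For illustration, a sketch of <code>save</code> with an explicit path (assuming an existing <code>DataFrame</code> named <code>df</code> and a hypothetical output directory):</p><pre>df.write
  .format(<span class="lit">"json"</span>)
  .mode(<span class="lit">"ignore"</span>)  <span class="cmt">// no-op if the path already contains data</span>
  .save(<span class="lit">"/tmp/events_json"</span>)</pre></div>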
</li><li name="org.apache.spark.sql.DataFrameWriter#saveAsTable" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="saveAsTable(tableName:String):Unit"></a><a id="saveAsTable(String):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#saveAsTable(tableName:String):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">saveAsTable</span><span class="params">(<span name="tableName">tableName: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<p class="shortcomment cmt">Saves the content of the <code>DataFrame</code> as the specified table.</p><div class="fullcomment"><div class="comment cmt"><p>Saves the content of the <code>DataFrame</code> as the specified table.</p><p>In the case the table already exists, behavior of this function depends on the
save mode, specified by the <code>mode</code> function (default to throwing an exception).
When <code>mode</code> is <code>Overwrite</code>, the schema of the <code>DataFrame</code> does not need to be
the same as that of the existing table.</p><p>When <code>mode</code> is <code>Append</code>, if there is an existing table, we will use the format and options of
the existing table. The column order in the schema of the <code>DataFrame</code> doesn't need to be same
as that of the existing table. Unlike <code>insertInto</code>, <code>saveAsTable</code> will use the column names to
find the correct column positions. For example:</p><pre>scala&gt; <span class="std">Seq</span>((<span class="num">1</span>, <span class="num">2</span>)).toDF(<span class="lit">"i"</span>, <span class="lit">"j"</span>).write.mode(<span class="lit">"overwrite"</span>).saveAsTable(<span class="lit">"t1"</span>)
scala&gt; <span class="std">Seq</span>((<span class="num">3</span>, <span class="num">4</span>)).toDF(<span class="lit">"j"</span>, <span class="lit">"i"</span>).write.mode(<span class="lit">"append"</span>).saveAsTable(<span class="lit">"t1"</span>)
scala&gt; sql(<span class="lit">"select * from t1"</span>).show
+---+---+
| i| j|
+---+---+
| <span class="num">1</span>| <span class="num">2</span>|
| <span class="num">4</span>| <span class="num">3</span>|
+---+---+</pre><p>In this method, save mode is used to determine the behavior if the data source table exists in
Spark catalog. We will always overwrite the underlying data of data source (e.g. a table in
JDBC data source) if the table doesn't exist in Spark catalog, and will always append to the
underlying data of data source if the table already exists.</p><p>When the DataFrame is created from a non-partitioned <code>HadoopFsRelation</code> with a single input
path, and the data source provider can be mapped to an existing Hive builtin SerDe (i.e. ORC
and Parquet), the table is persisted in a Hive compatible format, which means other systems
like Hive will be able to read this table. Otherwise, the table is persisted in a Spark SQL
specific format.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.4.0</p></dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#sortBy" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="sortBy(colName:String,colNames:String*):org.apache.spark.sql.DataFrameWriter[T]"></a><a id="sortBy(String,String*):DataFrameWriter[T]"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#sortBy(colName:String,colNames:String*):org.apache.spark.sql.DataFrameWriter[T]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">sortBy</span><span class="params">(<span name="colName">colName: <span class="extype" name="scala.Predef.String">String</span></span>, <span name="colNames">colNames: <span class="extype" name="scala.Predef.String">String</span>*</span>)</span><span class="result">: <a href="" class="extype" name="org.apache.spark.sql.DataFrameWriter">DataFrameWriter</a>[<span class="extype" name="org.apache.spark.sql.DataFrameWriter.T">T</span>]</span>
</span>
<p class="shortcomment cmt">Sorts the output in each bucket by the given columns.</p><div class="fullcomment"><div class="comment cmt"><p>Sorts the output in each bucket by the given columns.</p><p>This is applicable for all file-based data sources (e.g. Parquet, JSON) starting with Spark
2.1.0.
</p></div><dl class="attributes block"> <dt>Annotations</dt><dd>
<span class="name">@varargs</span><span class="args">()</span>
</dd><dt>Since</dt><dd><p>2.0.0</p></dd></dl></div>
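<div class="comment cmt"><p>For illustration, a sketch of <code>sortBy</code>, which is used together with <code>bucketBy</code> when writing a bucketed table (assuming an existing <code>DataFrame</code> named <code>df</code> with <code>user_id</code> and <code>event_time</code> columns; the table name is a placeholder):</p><pre>df.write
  .bucketBy(<span class="num">8</span>, <span class="lit">"user_id"</span>)
  .sortBy(<span class="lit">"event_time"</span>)  <span class="cmt">// sort rows within each bucket</span>
  .saveAsTable(<span class="lit">"events_bucketed"</span>)</pre></div>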
</li><li name="scala.AnyRef#synchronized" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="synchronized[T0](x$1:=&gt;T0):T0"></a><a id="synchronized[T0](⇒T0):T0"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#synchronized[T0](x$1:=&gt;T0):T0" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">synchronized</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="params">(<span name="arg0">arg0: ⇒ <span class="extype" name="java.lang.AnyRef.synchronized.T0">T0</span></span>)</span><span class="result">: <span class="extype" name="java.lang.AnyRef.synchronized.T0">T0</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="org.apache.spark.sql.DataFrameWriter#text" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="text(path:String):Unit"></a><a id="text(String):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#text(path:String):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">text</span><span class="params">(<span name="path">path: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<p class="shortcomment cmt">Saves the content of the <code>DataFrame</code> in a text file at the specified path.</p><div class="fullcomment"><div class="comment cmt"><p>Saves the content of the <code>DataFrame</code> in a text file at the specified path.
The DataFrame must have only one column that is of string type.
Each row becomes a new line in the output file. For example:</p><pre><span class="cmt">// Scala:</span>
df.write.text(<span class="lit">"/path/to/output"</span>)
<span class="cmt">// Java:</span>
df.write().text(<span class="lit">"/path/to/output"</span>)</pre><p>The text files will be encoded as UTF-8.</p><p>You can find the text-specific options for writing text files in
<a href="https://spark.apache.org/docs/latest/sql-data-sources-text.html#data-source-option">
Data Source Option</a> in the version you use.
</p></div><dl class="attributes block"> <dt>Since</dt><dd><p>1.6.0</p></dd></dl></div>
</li><li name="scala.AnyRef#toString" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="toString():String"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#toString():String" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">toString</span><span class="params">()</span><span class="result">: <span class="extype" name="java.lang.String">String</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait():Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#wait():Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.InterruptedException]">...</span>
</span>)</span>
</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait(x$1:Long,x$2:Int):Unit"></a><a id="wait(Long,Int):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#wait(x$1:Long,x$2:Int):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Long">Long</span></span>, <span name="arg1">arg1: <span class="extype" name="scala.Int">Int</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.InterruptedException]">...</span>
</span>)</span>
</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait(x$1:Long):Unit"></a><a id="wait(Long):Unit"></a>
<span class="permalink">
<a href="../../../../org/apache/spark/sql/DataFrameWriter.html#wait(x$1:Long):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Long">Long</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.InterruptedException]">...</span>
</span>)</span>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li>
</ol>
</div>
</div>
<div id="inheritedMembers">
<div class="parent" name="scala.AnyRef">
<h3>Inherited from <a href="../../../../scala/index.html#AnyRef=Object" class="extmbr" name="scala.AnyRef">AnyRef</a></h3>
</div><div class="parent" name="scala.Any">
<h3>Inherited from <span class="extype" name="scala.Any">Any</span></h3>
</div>
</div>
<div id="groupedMembers">
<div class="group" name="Ungrouped">
<h3>Ungrouped</h3>
</div>
</div>
</div>
<div id="tooltip"></div>
<div id="footer"> </div>
</div>
</div>
</div>
</body>
</html>