Spark 4.1.0 ScalaDoc - org.apache.spark.sql.DataFrameWriter
abstract class DataFrameWriter[T] extends AnyRef
Ordering
- Alphabetic
- By Inheritance
Inherited
DataFrameWriter
AnyRef
Any
Hide All
Show All
Visibility
- Public
- Protected
Instance Constructors
- new DataFrameWriter()
Abstract Value Members
- abstract def insertInto(tableName: String): Unit
- abstract def save(): Unit
- abstract def save(path: String): Unit
- abstract def saveAsTable(tableName: String): Unit
Concrete Value Members
- final def !=(arg0: Any): Boolean
- final def ##: Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def assertNotBucketed(operation: String): Unit
- def assertNotClustered(operation: String): Unit
- def assertNotPartitioned(operation: String): Unit
- def bucketBy(numBuckets: Int, colName: String, colNames: String*): DataFrameWriter.this.type
- var bucketColumnNames: Option[Seq[String]]
- def clone(): AnyRef
- def clusterBy(colName: String, colNames: String*): DataFrameWriter.this.type
- var clusteringColumns: Option[Seq[String]]
- def csv(path: String): Unit
- var curmode: SaveMode
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: AnyRef): Boolean
- var extraOptions: CaseInsensitiveMap[String]
- def format(source: String): DataFrameWriter.this.type
- final def getClass(): Class[_ <: AnyRef]
- def hashCode(): Int
- def isBucketed(): Boolean
- final def isInstanceOf[T0]: Boolean
- def jdbc(url: String, table: String, connectionProperties: Properties): Unit
- def json(path: String): Unit
- def mode(saveMode: String): DataFrameWriter.this.type
- def mode(saveMode: SaveMode): DataFrameWriter.this.type
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- var numBuckets: Option[Int]
- def option(key: String, value: Double): DataFrameWriter.this.type
- def option(key: String, value: Long): DataFrameWriter.this.type
- def option(key: String, value: Boolean): DataFrameWriter.this.type
- def option(key: String, value: String): DataFrameWriter.this.type
- def options(options: scala.collection.Map[String, String]): DataFrameWriter.this.type
- def options(options: java.util.Map[String, String]): DataFrameWriter.this.type
- def orc(path: String): Unit
- def parquet(path: String): Unit
- def partitionBy(colNames: String*): DataFrameWriter.this.type
- var partitioningColumns: Option[Seq[String]]
- def sortBy(colName: String, colNames: String*): DataFrameWriter.this.type
- var sortColumnNames: Option[Seq[String]]
- var source: String
- final def synchronized[T0](arg0: => T0): T0
- def text(path: String): Unit
- def toString(): String
- def validatePartitioning(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
- final def wait(): Unit
- def xml(path: String): Unit
Deprecated Value Members
- def finalize(): Unit