Spark 3.5.5 ScalaDoc - org.apache.spark.sql.DataFrameWriterV2
final class DataFrameWriterV2[T] extends CreateTableWriter[T]
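DataFrameWriterV2 is the builder returned by Dataset.writeTo(table) for writing a Dataset to a table through the v2 catalog API (available since Spark 3.0). A minimal append sketch is below, and a fuller sketch covering the table-creating methods follows the member list; the table name demo.db.events and the sample data are illustrative assumptions, not part of this page.

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().appName("writer-v2-append").getOrCreate()
import spark.implicits._

// Any Dataset/DataFrame works; this small one is only for illustration.
val events = Seq((1L, "click"), (2L, "view")).toDF("id", "kind")

// Dataset.writeTo returns a DataFrameWriterV2; append() requires the
// target table to already exist in a V2 catalog.
events.writeTo("demo.db.events").append()
```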
Inherited
- DataFrameWriterV2
- CreateTableWriter
- WriteConfigMethods
- AnyRef
- Any
Value Members
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- def append(): Unit
- final def asInstanceOf[T0]: T0
- def clone(): AnyRef
- def create(): Unit
- def createOrReplace(): Unit
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- final def getClass(): Class[_]
- def hashCode(): Int
- final def isInstanceOf[T0]: Boolean
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def option(key: String, value: String): DataFrameWriterV2[T]
- def option(key: String, value: Double): CreateTableWriter[T]
- def option(key: String, value: Long): CreateTableWriter[T]
- def option(key: String, value: Boolean): CreateTableWriter[T]
- def options(options: scala.collection.Map[String, String]): DataFrameWriterV2[T]
- def options(options: java.util.Map[String, String]): DataFrameWriterV2[T]
- def overwrite(condition: Column): Unit
- def overwritePartitions(): Unit
- def partitionedBy(column: Column, columns: Column*): CreateTableWriter[T]
- def replace(): Unit
- final def synchronized[T0](arg0: ⇒ T0): T0
- def tableProperty(property: String, value: String): CreateTableWriter[T]
- def toString(): String
- def using(provider: String): CreateTableWriter[T]
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
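The members above split into the CreateTableWriter side (create, createOrReplace, replace, using, partitionedBy, tableProperty) and the data-writing side (append, overwrite, overwritePartitions), plus option/options configuration forwarded to the underlying data source. The sketch below strings these together under stated assumptions: the catalog and table name, the provider, and the property and option keys are illustrative, not values prescribed by this API.

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, days}

val spark = SparkSession.builder().appName("writer-v2-create").getOrCreate()
import spark.implicits._

val df = Seq(
  (1L, "2024-01-01T00:00:00", "click"),
  (2L, "2024-01-02T00:00:00", "view")
).toDF("id", "ts", "kind")
  .withColumn("ts", col("ts").cast("timestamp"))

// Create (or replace) a partitioned table through the V2 catalog.
// Catalog/table name, property, and option keys are illustrative only.
df.writeTo("demo.db.events")
  .using("parquet")                       // table provider
  .partitionedBy(days(col("ts")))         // partition transform on the timestamp
  .tableProperty("created.by", "example-job")
  .option("compression", "snappy")        // passed through to the data source
  .createOrReplace()

// Later writes can overwrite dynamically by partition, or by a filter condition.
df.writeTo("demo.db.events").overwritePartitions()
df.writeTo("demo.db.events").overwrite(col("kind") === "click")
```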