Spark 3.5.5 ScalaDoc - org.apache.spark.sql.DataFrameReader
class DataFrameReader extends Logging
Members inherited from: DataFrameReader, Logging, AnyRef, Any
Value Members
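Usage sketches for the common read paths (the generic format/option/schema/load chain, csv, json, jdbc, parquet/orc, table, and text/textFile) follow the member list below.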
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def clone(): AnyRef
- def csv(paths: String*): DataFrame
- def csv(csvDataset: Dataset[String]): DataFrame
- def csv(path: String): DataFrame
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- def format(source: String): DataFrameReader
- final def getClass(): Class[_]
- def hashCode(): Int
- def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
- def initializeLogIfNecessary(isInterpreter: Boolean): Unit
- final def isInstanceOf[T0]: Boolean
- def isTraceEnabled(): Boolean
- def jdbc(url: String, table: String, predicates: Array[String], connectionProperties: Properties): DataFrame
- def jdbc(url: String, table: String, columnName: String, lowerBound: Long, upperBound: Long, numPartitions: Int, connectionProperties: Properties): DataFrame
- def jdbc(url: String, table: String, properties: Properties): DataFrame
- def json(jsonDataset: Dataset[String]): DataFrame
- def json(paths: String*): DataFrame
- def json(path: String): DataFrame
- def load(paths: String*): DataFrame
- def load(path: String): DataFrame
- def load(): DataFrame
- def log: Logger
- def logDebug(msg: ⇒ String, throwable: Throwable): Unit
- def logDebug(msg: ⇒ String): Unit
- def logError(msg: ⇒ String, throwable: Throwable): Unit
- def logError(msg: ⇒ String): Unit
- def logInfo(msg: ⇒ String, throwable: Throwable): Unit
- def logInfo(msg: ⇒ String): Unit
- def logName: String
- def logTrace(msg: ⇒ String, throwable: Throwable): Unit
- def logTrace(msg: ⇒ String): Unit
- def logWarning(msg: ⇒ String, throwable: Throwable): Unit
- def logWarning(msg: ⇒ String): Unit
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def option(key: String, value: Double): DataFrameReader
- def option(key: String, value: Long): DataFrameReader
- def option(key: String, value: Boolean): DataFrameReader
- def option(key: String, value: String): DataFrameReader
- def options(options: java.util.Map[String, String]): DataFrameReader
- def options(options: scala.collection.Map[String, String]): DataFrameReader
- def orc(paths: String*): DataFrame
- def orc(path: String): DataFrame
- def parquet(paths: String*): DataFrame
- def parquet(path: String): DataFrame
- def schema(schemaString: String): DataFrameReader
- def schema(schema: StructType): DataFrameReader
- final def synchronized[T0](arg0: ⇒ T0): T0
- def table(tableName: String): DataFrame
- def text(paths: String*): DataFrame
- def text(path: String): DataFrame
- def textFile(paths: String*): Dataset[String]
- def textFile(path: String): Dataset[String]
- def toString(): String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
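Usage Examples

A minimal sketch of the generic reader chain — format, schema, option, options, and load. The local-mode session, schema, and file path are placeholders chosen for illustration, not part of the API documentation above.

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

// Standalone sketch; the later examples assume `spark` is already in scope (e.g. spark-shell).
val spark = SparkSession.builder()
  .appName("DataFrameReaderSketch")
  .master("local[*]")                                  // local mode for the sketch only
  .getOrCreate()

// An explicit schema avoids a schema-inference pass over the data.
val peopleSchema = StructType(Seq(
  StructField("id", IntegerType, nullable = false),
  StructField("name", StringType, nullable = true)
))

// format(...) selects the source, option(...)/options(...) configure it,
// and load(...) produces the DataFrame. The path is a placeholder.
val people = spark.read
  .format("csv")
  .schema(peopleSchema)
  .option("header", true)                              // Boolean overload
  .options(Map("sep" -> ",", "mode" -> "PERMISSIVE"))  // Scala Map overload
  .load("/tmp/example/people.csv")

people.show()
```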
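A sketch of the three csv overloads, assuming a spark-shell style session where `spark` is predefined; the file paths are placeholders.

```scala
import spark.implicits._   // for .toDS() on a local Seq

// Single-path and varargs overloads.
val one  = spark.read.option("header", true).csv("/tmp/example/a.csv")
val many = spark.read.option("header", true).csv("/tmp/example/a.csv", "/tmp/example/b.csv")

// Dataset[String] overload: parse CSV lines that are already held in the job,
// e.g. extracted from another column.
val lines  = Seq("id,name", "1,alice", "2,bob").toDS()
val parsed = spark.read.option("header", true).csv(lines)
parsed.show()
```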
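A sketch of the json overloads under the same assumptions (existing `spark` session, placeholder paths).

```scala
import spark.implicits._

// Path overload: by default each line must hold one JSON object (JSON Lines).
val events = spark.read.json("/tmp/example/events.json")

// multiLine allows a single pretty-printed JSON document per file.
val doc = spark.read.option("multiLine", true).json("/tmp/example/doc.json")

// Dataset[String] overload: parse JSON strings produced earlier in the job.
val raw    = Seq("""{"id":1,"name":"alice"}""", """{"id":2,"name":"bob"}""").toDS()
val parsed = spark.read.json(raw)
parsed.printSchema()
```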
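A sketch of the three jdbc overloads. The URL, table name, credentials, column bounds, and driver class are placeholders, and the matching JDBC driver jar is assumed to be on the classpath.

```scala
import java.util.Properties

val props = new Properties()
props.setProperty("user", "reader")                    // placeholder credentials
props.setProperty("password", "secret")
props.setProperty("driver", "org.postgresql.Driver")   // placeholder driver class

val url = "jdbc:postgresql://db-host:5432/shop"        // placeholder URL

// Whole-table read in a single partition.
val whole = spark.read.jdbc(url, "public.orders", props)

// Range-partitioned read: numPartitions parallel queries splitting
// [lowerBound, upperBound] of columnName into strides.
val byRange = spark.read.jdbc(
  url, "public.orders",
  columnName = "order_id",
  lowerBound = 1L,
  upperBound = 1000000L,
  numPartitions = 8,
  connectionProperties = props)

// Predicate-partitioned read: one partition per WHERE clause.
val byStatus = spark.read.jdbc(
  url, "public.orders",
  Array("status = 'OPEN'", "status = 'CLOSED'"),
  props)
```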
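A sketch of the parquet and orc overloads; paths are placeholders.

```scala
// Columnar sources carry their own schema, so no schema(...) call is needed.
val users  = spark.read.parquet("/tmp/warehouse/users")
val merged = spark.read.parquet("/tmp/warehouse/2024/events", "/tmp/warehouse/2025/events")

val logs = spark.read.orc("/tmp/warehouse/logs")
logs.printSchema()
```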
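A sketch of table, which resolves a name through the session catalog. The temp view here is created only so the snippet runs without external storage; the same call works for metastore tables.

```scala
// table(...) reads whatever the catalog resolves the name to.
spark.range(5).createOrReplaceTempView("numbers")
val numbers = spark.read.table("numbers")
numbers.show()
```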
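A sketch contrasting text and textFile; the log path is a placeholder.

```scala
import org.apache.spark.sql.{DataFrame, Dataset}

// text(...) yields a DataFrame with a single string column named "value";
// textFile(...) yields a Dataset[String] instead.
val asDf: DataFrame       = spark.read.text("/tmp/example/app.log")
val asDs: Dataset[String] = spark.read.textFile("/tmp/example/app.log")

asDs.filter(_.contains("ERROR")).show(truncate = false)
```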