Spark 3.5.5 ScalaDoc - org.apache.spark.sql.SparkSession
class SparkSession extends Serializable with Closeable with Logging
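A SparkSession is normally obtained through the builder on the companion object (not listed on this page). A minimal sketch, assuming a local master and a placeholder application name:

```scala
import org.apache.spark.sql.SparkSession

// Build or reuse a session; the appName and master values are placeholders.
val spark = SparkSession.builder()
  .appName("scaladoc-example")
  .master("local[*]")
  .getOrCreate()

println(spark.version)   // e.g. "3.5.5"

// close() (inherited via Closeable) delegates to stop(), which shuts down the SparkContext.
spark.stop()
```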
Inherited members from: SparkSession, Logging, Closeable, AutoCloseable, Serializable (scala.Serializable and java.io.Serializable), AnyRef, Any
Value Members
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def baseRelationToDataFrame(baseRelation: BaseRelation): DataFrame
- lazy val catalog: Catalog
- def clone(): AnyRef
- def close(): Unit
- lazy val conf: RuntimeConfig
- def createDataFrame(data: List[_], beanClass: Class[_]): DataFrame
- def createDataFrame(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame
- def createDataFrame(rdd: RDD[_], beanClass: Class[_]): DataFrame
- def createDataFrame(rows: List[Row], schema: StructType): DataFrame
- def createDataFrame(rowRDD: JavaRDD[Row], schema: StructType): DataFrame
- def createDataFrame(rowRDD: RDD[Row], schema: StructType): DataFrame
- def createDataFrame[A <: Product](data: Seq[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame
- def createDataFrame[A <: Product](rdd: RDD[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame
- def createDataset[T](data: List[T])(implicit arg0: Encoder[T]): Dataset[T]
- def createDataset[T](data: RDD[T])(implicit arg0: Encoder[T]): Dataset[T]
- def createDataset[T](data: Seq[T])(implicit arg0: Encoder[T]): Dataset[T]
- lazy val emptyDataFrame: DataFrame
- def emptyDataset[T](implicit arg0: Encoder[T]): Dataset[T]
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def executeCommand(runner: String, command: String, options: Map[String, String]): DataFrame
- def experimental: ExperimentalMethods
- def finalize(): Unit
- final def getClass(): Class[_]
- def hashCode(): Int
- def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
- def initializeLogIfNecessary(isInterpreter: Boolean): Unit
- final def isInstanceOf[T0]: Boolean
- def isTraceEnabled(): Boolean
- def listenerManager: ExecutionListenerManager
- def log: Logger
- def logDebug(msg: ⇒ String, throwable: Throwable): Unit
- def logDebug(msg: ⇒ String): Unit
- def logError(msg: ⇒ String, throwable: Throwable): Unit
- def logError(msg: ⇒ String): Unit
- def logInfo(msg: ⇒ String, throwable: Throwable): Unit
- def logInfo(msg: ⇒ String): Unit
- def logName: String
- def logTrace(msg: ⇒ String, throwable: Throwable): Unit
- def logTrace(msg: ⇒ String): Unit
- def logWarning(msg: ⇒ String, throwable: Throwable): Unit
- def logWarning(msg: ⇒ String): Unit
- final def ne(arg0: AnyRef): Boolean
- def newSession(): SparkSession
- final def notify(): Unit
- final def notifyAll(): Unit
- def parseDataType(dataTypeString: String): DataType
- def range(start: Long, end: Long, step: Long, numPartitions: Int): Dataset[Long]
- def range(start: Long, end: Long, step: Long): Dataset[Long]
- def range(start: Long, end: Long): Dataset[Long]
- def range(end: Long): Dataset[Long]
- def read: DataFrameReader
- def readStream: DataStreamReader
- lazy val sessionState: SessionState
- lazy val sharedState: SharedState
- val sparkContext: SparkContext
- def sql(sqlText: String): DataFrame
- def sql(sqlText: String, args: Map[String, Any]): DataFrame
- def sql(sqlText: String, args: java.util.Map[String, Any]): DataFrame
- def sql(sqlText: String, args: Array[_]): DataFrame
- val sqlContext: SQLContext
- def stop(): Unit
- def streams: StreamingQueryManager
- final def synchronized[T0](arg0: ⇒ T0): T0
- def table(tableName: String): DataFrame
- def time[T](f: ⇒ T): T
- def toString(): String
- def udf: UDFRegistration
- def udtf: UDTFRegistration
- def version: String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
- object implicits extends SQLImplicits with Serializable
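A few hedged usage sketches for selected members above follow; they reuse the `spark` session from the builder sketch near the top of the page, and all data and names are illustrative. First, the createDataFrame and createDataset overloads:

```scala
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import spark.implicits._

// Hypothetical case class used only for these examples
// (define it at top level in compiled code so a TypeTag is available).
case class Person(name: String, age: Int)

// createDataFrame[A <: Product](data: Seq[A]): schema inferred from the case class.
val peopleDF = spark.createDataFrame(Seq(Person("Alice", 29), Person("Bob", 31)))

// createDataFrame(rowRDD: RDD[Row], schema: StructType): explicit schema.
val schema = StructType(Seq(StructField("name", StringType), StructField("age", IntegerType)))
val rowRDD = spark.sparkContext.parallelize(Seq(Row("Carol", 42)))
val peopleDF2 = spark.createDataFrame(rowRDD, schema)

// createDataset[T](data: Seq[T]): needs an implicit Encoder, provided by spark.implicits._.
val peopleDS = spark.createDataset(Seq(Person("Dave", 25)))
```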
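The range overloads produce a single-column Dataset of longs named `id`; a minimal sketch:

```scala
// range(end): values 0 until 10.
val ids = spark.range(10)

// range(start, end, step, numPartitions): 0, 2, ..., 98 spread over 4 partitions.
val evens = spark.range(0, 100, 2, 4)

println(evens.count())  // 50
```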
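read and readStream return a DataFrameReader and a DataStreamReader respectively; the paths and options below are placeholders:

```scala
// Batch read; format and options are illustrative.
val events = spark.read
  .option("header", "true")
  .option("inferSchema", "true")
  .csv("/path/to/events.csv")

// Streaming read; file-based streaming sources require an explicit schema.
val eventStream = spark.readStream
  .schema(events.schema)
  .csv("/path/to/incoming/")
```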
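The parameterized sql overloads bind named markers (:name) from a Map and positional markers (?) from an Array; a sketch against a temporary view created just for the example, which also shows table(tableName):

```scala
import spark.implicits._

Seq(("Alice", 29), ("Bob", 17)).toDF("name", "age").createOrReplaceTempView("people")

// Named parameters (Map overload).
val adults = spark.sql("SELECT * FROM people WHERE age >= :minAge", Map("minAge" -> 21))

// Positional parameters (Array overload).
val alice = spark.sql("SELECT * FROM people WHERE name = ?", Array("Alice"))

// table(tableName) reads the same view back as a DataFrame.
val people = spark.table("people")
```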
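udf returns the session's UDFRegistration; a registered Scala function becomes callable from SQL text:

```scala
// Register a Scala function under the name "plusOne".
spark.udf.register("plusOne", (x: Long) => x + 1)

// range(5) here is the SQL table-valued function producing ids 0..4.
spark.sql("SELECT id, plusOne(id) AS next FROM range(5)").show()
```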
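object implicits brings in the common Encoders, the toDF/toDS conversions on local collections and RDDs, and the $"col" column interpolator; a short sketch:

```scala
import spark.implicits._

val df = Seq(("a", 1), ("b", 2)).toDF("key", "value")
val big = df.filter($"value" > 1)

val doubled = Seq(1L, 2L, 3L).toDS().map(_ * 2)
println(doubled.collect().mkString(","))  // 2,4,6
```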