Spark 3.5.5 ScalaDoc - org.apache.spark.mllib.classification.LogisticRegressionModel
class LogisticRegressionModel extends GeneralizedLinearModel with ClassificationModel with Serializable with Saveable with PMMLExportable
Ordering
- Alphabetic
- By Inheritance
Inherited
LogisticRegressionModel
PMMLExportable
Saveable
ClassificationModel
GeneralizedLinearModel
Serializable
Serializable
AnyRef
Any
Hide All
Show All
Instance Constructors
- new LogisticRegressionModel(weights: Vector, intercept: Double)
- new LogisticRegressionModel(weights: Vector, intercept: Double, numFeatures: Int, numClasses: Int)
Value Members
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def clearThreshold(): LogisticRegressionModel.this.type
- def clone(): AnyRef
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- final def getClass(): Class[_]
- def getThreshold: Option[Double]
- def hashCode(): Int
- val intercept: Double
- final def isInstanceOf[T0]: Boolean
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- val numClasses: Int
- val numFeatures: Int
- def predict(testData: JavaRDD[Vector]): JavaRDD[Double]
- def predict(testData: Vector): Double
- def predict(testData: RDD[Vector]): RDD[Double]
- def predictPoint(dataMatrix: Vector, weightMatrix: Vector, intercept: Double): Double
- def save(sc: SparkContext, path: String): Unit
- def setThreshold(threshold: Double): LogisticRegressionModel.this.type
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toPMML(): String
- def toPMML(outputStream: OutputStream): Unit
- def toPMML(sc: SparkContext, path: String): Unit
- def toPMML(localPath: String): Unit
- def toString(): String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
- val weights: Vector