Skip to content

Commit 55e1317

Browse files
committed
Added `@Since` version tags for public variables
1 parent fa61502 commit 55e1317

File tree

3 files changed: +11 additions, −0 deletions

mllib/src/main/scala/org/apache/spark/ml/regression/GBTRegressor.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -111,6 +111,7 @@ final class GBTRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: Stri
111111
* (default = squared)
112112
* @group param
113113
*/
114+
@Since("1.4.0")
114115
val lossType: Param[String] = new Param[String](this, "lossType", "Loss function which GBT" +
115116
" tries to minimize (case-insensitive). Supported options:" +
116117
s" ${GBTRegressor.supportedLossTypes.mkString(", ")}",
@@ -157,6 +158,7 @@ final class GBTRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: Stri
157158
object GBTRegressor {
158159
// The losses below should be lowercase.
159160
/** Accessor for supported loss settings: squared (L2), absolute (L1) */
161+
@Since("1.4.0")
160162
final val supportedLossTypes: Array[String] = Array("squared", "absolute").map(_.toLowerCase)
161163
}
162164

mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -430,6 +430,7 @@ class LinearRegressionTrainingSummary private[regression] (
430430
extends LinearRegressionSummary(predictions, predictionCol, labelCol) {
431431

432432
/** Number of training iterations until termination */
433+
@Since("1.5.0")
433434
val totalIterations = objectiveHistory.length
434435

435436
}
@@ -456,33 +457,39 @@ class LinearRegressionSummary private[regression] (
456457
* explainedVariance = 1 - variance(y - \hat{y}) / variance(y)
457458
* Reference: [[http://en.wikipedia.org/wiki/Explained_variation]]
458459
*/
460+
@Since("1.5.0")
459461
val explainedVariance: Double = metrics.explainedVariance
460462

461463
/**
462464
* Returns the mean absolute error, which is a risk function corresponding to the
463465
* expected value of the absolute error loss or l1-norm loss.
464466
*/
467+
@Since("1.5.0")
465468
val meanAbsoluteError: Double = metrics.meanAbsoluteError
466469

467470
/**
468471
* Returns the mean squared error, which is a risk function corresponding to the
469472
* expected value of the squared error loss or quadratic loss.
470473
*/
474+
@Since("1.5.0")
471475
val meanSquaredError: Double = metrics.meanSquaredError
472476

473477
/**
474478
* Returns the root mean squared error, which is defined as the square root of
475479
* the mean squared error.
476480
*/
481+
@Since("1.5.0")
477482
val rootMeanSquaredError: Double = metrics.rootMeanSquaredError
478483

479484
/**
480485
* Returns R^2^, the coefficient of determination.
481486
* Reference: [[http://en.wikipedia.org/wiki/Coefficient_of_determination]]
482487
*/
488+
@Since("1.5.0")
483489
val r2: Double = metrics.r2
484490

485491
/** Residuals (label - predicted value) */
492+
@Since("1.5.0")
486493
@transient lazy val residuals: DataFrame = {
487494
val t = udf { (pred: Double, label: Double) => label - pred }
488495
predictions.select(t(col(predictionCol), col(labelCol)).as("residuals"))

mllib/src/main/scala/org/apache/spark/ml/regression/RandomForestRegressor.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -110,9 +110,11 @@ final class RandomForestRegressor @Since("1.4.0") (@Since("1.4.0") override val
110110
@Experimental
111111
object RandomForestRegressor {
112112
/** Accessor for supported impurity settings: variance */
113+
@Since("1.4.0")
113114
final val supportedImpurities: Array[String] = TreeRegressorParams.supportedImpurities
114115

115116
/** Accessor for supported featureSubsetStrategy settings: auto, all, onethird, sqrt, log2 */
117+
@Since("1.4.0")
116118
final val supportedFeatureSubsetStrategies: Array[String] =
117119
RandomForestParams.supportedFeatureSubsetStrategies
118120
}

0 commit comments

Comments (0)