
Commit 4f7c36f

Since annotation for ml.regression

1 parent 032748b
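For context: @Since records the Spark version in which a public API element first appeared, and the documentation tooling surfaces it as a "Since" tag in Scaladoc/Javadoc. A rough sketch of how such an annotation is declared — the real definition lives in org.apache.spark.annotation and may differ in visibility and exact meta-annotations:

    import scala.annotation.StaticAnnotation
    import scala.annotation.meta._

    // The meta-annotations let @Since attach to constructor parameters as well
    // as the fields and getters/setters the compiler derives from a `val`
    // parameter, which is why it can be placed directly on `uid` below.
    @param @field @getter @setter @beanGetter @beanSetter
    class Since(version: String) extends StaticAnnotation

As a StaticAnnotation it is consumed by documentation and signature tooling rather than at runtime, so sprinkling it across setters as this commit does has no performance cost.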

File tree

5 files changed: +77, −54 lines

mllib/src/main/scala/org/apache/spark/ml/regression/DecisionTreeRegressor.scala
mllib/src/main/scala/org/apache/spark/ml/regression/GBTRegressor.scala
mllib/src/main/scala/org/apache/spark/ml/regression/IsotonicRegression.scala
mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala
mllib/src/main/scala/org/apache/spark/ml/regression/RandomForestRegressor.scala

mllib/src/main/scala/org/apache/spark/ml/regression/DecisionTreeRegressor.scala

Lines changed: 14 additions & 12 deletions
@@ -17,7 +17,7 @@
 
 package org.apache.spark.ml.regression
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, Since}
 import org.apache.spark.ml.{PredictionModel, Predictor}
 import org.apache.spark.ml.param.ParamMap
 import org.apache.spark.ml.tree.{DecisionTreeModel, DecisionTreeParams, Node, TreeRegressorParams}
@@ -36,30 +36,31 @@ import org.apache.spark.sql.DataFrame
  * for regression.
  * It supports both continuous and categorical features.
  */
+@Since("1.4.0")
 @Experimental
-final class DecisionTreeRegressor(override val uid: String)
+final class DecisionTreeRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: String)
   extends Predictor[Vector, DecisionTreeRegressor, DecisionTreeRegressionModel]
   with DecisionTreeParams with TreeRegressorParams {
-
+  @Since("1.4.0")
   def this() = this(Identifiable.randomUID("dtr"))
 
   // Override parameter setters from parent trait for Java API compatibility.
-
+  @Since("1.4.0")
   override def setMaxDepth(value: Int): this.type = super.setMaxDepth(value)
-
+  @Since("1.4.0")
   override def setMaxBins(value: Int): this.type = super.setMaxBins(value)
-
+  @Since("1.4.0")
   override def setMinInstancesPerNode(value: Int): this.type =
     super.setMinInstancesPerNode(value)
-
+  @Since("1.4.0")
   override def setMinInfoGain(value: Double): this.type = super.setMinInfoGain(value)
-
+  @Since("1.4.0")
   override def setMaxMemoryInMB(value: Int): this.type = super.setMaxMemoryInMB(value)
-
+  @Since("1.4.0")
   override def setCacheNodeIds(value: Boolean): this.type = super.setCacheNodeIds(value)
-
+  @Since("1.4.0")
   override def setCheckpointInterval(value: Int): this.type = super.setCheckpointInterval(value)
-
+  @Since("1.4.0")
   override def setImpurity(value: String): this.type = super.setImpurity(value)
 
   override protected def train(dataset: DataFrame): DecisionTreeRegressionModel = {
@@ -77,10 +78,11 @@ final class DecisionTreeRegressor(override val uid: String)
     super.getOldStrategy(categoricalFeatures, numClasses = 0, OldAlgo.Regression, getOldImpurity,
       subsamplingRate = 1.0)
   }
-
+  @Since("1.4.0")
   override def copy(extra: ParamMap): DecisionTreeRegressor = defaultCopy(extra)
 }
 
+@Since("1.4.0")
 @Experimental
 object DecisionTreeRegressor {
   /** Accessor for supported impurities: variance */
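Note the pattern on the class line: @Since appears once before the class (marking the type), once after the class name (marking the primary constructor), and once on uid (marking the parameter and the accessor generated from the val). A minimal hypothetical sketch of the same three-site pattern, outside Spark's tree code:

    import org.apache.spark.annotation.Since

    // Hypothetical class, shown only to illustrate the three annotation sites.
    @Since("1.4.0")                      // 1: the class/type itself
    class MyRegressor @Since("1.4.0") (  // 2: the primary constructor
        @Since("1.4.0") val uid: String) {  // 3: the uid parameter/field
      @Since("1.4.0")
      def this() = this("myReg_" + java.util.UUID.randomUUID().toString)
    }

The auxiliary def this() needs its own annotation because it is a distinct public entry point, which is why the diff adds @Since above each no-arg constructor.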

mllib/src/main/scala/org/apache/spark/ml/regression/GBTRegressor.scala

Lines changed: 20 additions & 15 deletions
@@ -20,7 +20,7 @@ package org.apache.spark.ml.regression
 import com.github.fommil.netlib.BLAS.{getInstance => blas}
 
 import org.apache.spark.Logging
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, Since}
 import org.apache.spark.ml.{PredictionModel, Predictor}
 import org.apache.spark.ml.param.{Param, ParamMap}
 import org.apache.spark.ml.tree.{DecisionTreeModel, GBTParams, TreeEnsembleModel, TreeRegressorParams}
@@ -42,54 +42,56 @@ import org.apache.spark.sql.types.DoubleType
  * learning algorithm for regression.
  * It supports both continuous and categorical features.
  */
+@Since("1.4.0")
 @Experimental
-final class GBTRegressor(override val uid: String)
+final class GBTRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: String)
   extends Predictor[Vector, GBTRegressor, GBTRegressionModel]
   with GBTParams with TreeRegressorParams with Logging {
-
+  @Since("1.4.0")
   def this() = this(Identifiable.randomUID("gbtr"))
 
   // Override parameter setters from parent trait for Java API compatibility.
 
   // Parameters from TreeRegressorParams:
-
+  @Since("1.4.0")
   override def setMaxDepth(value: Int): this.type = super.setMaxDepth(value)
-
+  @Since("1.4.0")
   override def setMaxBins(value: Int): this.type = super.setMaxBins(value)
-
+  @Since("1.4.0")
   override def setMinInstancesPerNode(value: Int): this.type =
     super.setMinInstancesPerNode(value)
-
+  @Since("1.4.0")
   override def setMinInfoGain(value: Double): this.type = super.setMinInfoGain(value)
-
+  @Since("1.4.0")
   override def setMaxMemoryInMB(value: Int): this.type = super.setMaxMemoryInMB(value)
-
+  @Since("1.4.0")
   override def setCacheNodeIds(value: Boolean): this.type = super.setCacheNodeIds(value)
-
+  @Since("1.4.0")
   override def setCheckpointInterval(value: Int): this.type = super.setCheckpointInterval(value)
 
   /**
    * The impurity setting is ignored for GBT models.
    * Individual trees are built using impurity "Variance."
    */
+  @Since("1.4.0")
   override def setImpurity(value: String): this.type = {
     logWarning("GBTRegressor.setImpurity should NOT be used")
     this
   }
 
   // Parameters from TreeEnsembleParams:
-
+  @Since("1.4.0")
   override def setSubsamplingRate(value: Double): this.type = super.setSubsamplingRate(value)
-
+  @Since("1.4.0")
   override def setSeed(value: Long): this.type = {
     logWarning("The 'seed' parameter is currently ignored by Gradient Boosting.")
     super.setSeed(value)
   }
 
   // Parameters from GBTParams:
-
+  @Since("1.4.0")
   override def setMaxIter(value: Int): this.type = super.setMaxIter(value)
-
+  @Since("1.4.0")
   override def setStepSize(value: Double): this.type = super.setStepSize(value)
 
   // Parameters for GBTRegressor:
@@ -108,9 +110,11 @@ final class GBTRegressor(override val uid: String)
   setDefault(lossType -> "squared")
 
   /** @group setParam */
+  @Since("1.4.0")
   def setLossType(value: String): this.type = set(lossType, value)
 
   /** @group getParam */
+  @Since("1.4.0")
   def getLossType: String = $(lossType).toLowerCase
 
   /** (private[ml]) Convert new loss to old loss. */
@@ -134,10 +138,11 @@ final class GBTRegressor(override val uid: String)
     val oldModel = oldGBT.run(oldDataset)
     GBTRegressionModel.fromOld(oldModel, this, categoricalFeatures, numFeatures)
   }
-
+  @Since("1.4.0")
   override def copy(extra: ParamMap): GBTRegressor = defaultCopy(extra)
 }
 
+@Since("1.4.0")
 @Experimental
 object GBTRegressor {
   // The losses below should be lowercase.
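Since setImpurity is a no-op here (trees are always built with variance impurity), the knobs that matter on GBTRegressor are the boosting parameters and lossType. A hedged usage sketch built only from the setters annotated above; the training DataFrame with "features"/"label" columns is an assumption:

    import org.apache.spark.ml.regression.GBTRegressor

    // training: DataFrame with a Vector "features" column and a Double
    // "label" column (assumed to exist).
    val gbt = new GBTRegressor()
      .setLossType("squared")  // default; "absolute" is the other supported loss
      .setMaxIter(50)          // number of boosting iterations (trees)
      .setStepSize(0.1)        // shrinkage applied to each tree's contribution
    val model = gbt.fit(training)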

mllib/src/main/scala/org/apache/spark/ml/regression/IsotonicRegression.scala

Lines changed: 14 additions & 7 deletions
@@ -18,7 +18,7 @@
 package org.apache.spark.ml.regression
 
 import org.apache.spark.Logging
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, Since}
 import org.apache.spark.ml.{Estimator, Model}
 import org.apache.spark.ml.param._
 import org.apache.spark.ml.param.shared.{HasFeaturesCol, HasLabelCol, HasPredictionCol, HasWeightCol}
@@ -124,32 +124,39 @@ private[regression] trait IsotonicRegressionBase extends Params with HasFeatures
  *
  * Uses [[org.apache.spark.mllib.regression.IsotonicRegression]].
  */
+@Since("1.5.0")
 @Experimental
-class IsotonicRegression(override val uid: String) extends Estimator[IsotonicRegressionModel]
-  with IsotonicRegressionBase {
-
+class IsotonicRegression @Since("1.5.0") (@Since("1.5.0") override val uid: String)
+  extends Estimator[IsotonicRegressionModel] with IsotonicRegressionBase {
+  @Since("1.5.0")
   def this() = this(Identifiable.randomUID("isoReg"))
 
   /** @group setParam */
+  @Since("1.5.0")
   def setLabelCol(value: String): this.type = set(labelCol, value)
 
   /** @group setParam */
+  @Since("1.5.0")
   def setFeaturesCol(value: String): this.type = set(featuresCol, value)
 
   /** @group setParam */
+  @Since("1.5.0")
   def setPredictionCol(value: String): this.type = set(predictionCol, value)
 
   /** @group setParam */
+  @Since("1.5.0")
   def setIsotonic(value: Boolean): this.type = set(isotonic, value)
 
   /** @group setParam */
+  @Since("1.5.0")
   def setWeightCol(value: String): this.type = set(weightCol, value)
 
   /** @group setParam */
+  @Since("1.5.0")
   def setFeatureIndex(value: Int): this.type = set(featureIndex, value)
-
+  @Since("1.5.0")
   override def copy(extra: ParamMap): IsotonicRegression = defaultCopy(extra)
-
+  @Since("1.5.0")
   override def fit(dataset: DataFrame): IsotonicRegressionModel = {
     validateAndTransformSchema(dataset.schema, fitting = true)
     // Extract columns from data. If dataset is persisted, do not persist oldDataset.
@@ -162,7 +169,7 @@ class IsotonicRegression(override val uid: String) extends Estimator[IsotonicReg
 
     copyValues(new IsotonicRegressionModel(uid, oldModel).setParent(this))
   }
-
+  @Since("1.5.0")
   override def transformSchema(schema: StructType): StructType = {
     validateAndTransformSchema(schema, fitting = true)
   }
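All of the newly annotated setters return this.type, so configuration chains fluently. A hedged sketch of fitting the estimator; the dataset is an assumption:

    import org.apache.spark.ml.regression.IsotonicRegression

    // dataset: DataFrame with "label" and "features" columns (assumed).
    val ir = new IsotonicRegression()
      .setIsotonic(true)   // fit a non-decreasing function; false fits a non-increasing one
      .setFeatureIndex(0)  // which coordinate to use when "features" is a vector column
    val model = ir.fit(dataset)
    val predictions = model.transform(dataset)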

mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala

Lines changed: 11 additions & 4 deletions
@@ -23,9 +23,9 @@ import breeze.linalg.{DenseVector => BDV}
 import breeze.optimize.{CachedDiffFunction, DiffFunction, LBFGS => BreezeLBFGS, OWLQN => BreezeOWLQN}
 
 import org.apache.spark.{Logging, SparkException}
-import org.apache.spark.annotation.Experimental
 import org.apache.spark.ml.feature.Instance
 import org.apache.spark.ml.optim.WeightedLeastSquares
+import org.apache.spark.annotation.{Experimental, Since}
 import org.apache.spark.ml.PredictorParams
 import org.apache.spark.ml.param.ParamMap
 import org.apache.spark.ml.param.shared._
@@ -60,18 +60,20 @@ private[regression] trait LinearRegressionParams extends PredictorParams
  * - L1 (Lasso)
  * - L2 + L1 (elastic net)
  */
+@Since("1.3.0")
 @Experimental
-class LinearRegression(override val uid: String)
+class LinearRegression @Since("1.3.0") (@Since("1.3.0") override val uid: String)
   extends Regressor[Vector, LinearRegression, LinearRegressionModel]
   with LinearRegressionParams with Logging {
-
+  @Since("1.4.0")
   def this() = this(Identifiable.randomUID("linReg"))
 
   /**
    * Set the regularization parameter.
   * Default is 0.0.
   * @group setParam
   */
+  @Since("1.3.0")
   def setRegParam(value: Double): this.type = set(regParam, value)
   setDefault(regParam -> 0.0)
 
@@ -80,6 +82,7 @@ class LinearRegression(override val uid: String)
   * Default is true.
   * @group setParam
   */
+  @Since("1.5.0")
   def setFitIntercept(value: Boolean): this.type = set(fitIntercept, value)
   setDefault(fitIntercept -> true)
 
@@ -92,6 +95,7 @@ class LinearRegression(override val uid: String)
   * Default is true.
   * @group setParam
   */
+  @Since("1.5.0")
   def setStandardization(value: Boolean): this.type = set(standardization, value)
   setDefault(standardization -> true)
 
@@ -102,6 +106,7 @@ class LinearRegression(override val uid: String)
   * Default is 0.0 which is an L2 penalty.
   * @group setParam
   */
+  @Since("1.4.0")
   def setElasticNetParam(value: Double): this.type = set(elasticNetParam, value)
   setDefault(elasticNetParam -> 0.0)
 
@@ -110,6 +115,7 @@ class LinearRegression(override val uid: String)
   * Default is 100.
   * @group setParam
   */
+  @Since("1.3.0")
   def setMaxIter(value: Int): this.type = set(maxIter, value)
   setDefault(maxIter -> 100)
 
@@ -119,6 +125,7 @@ class LinearRegression(override val uid: String)
   * Default is 1E-6.
   * @group setParam
   */
+  @Since("1.4.0")
   def setTol(value: Double): this.type = set(tol, value)
   setDefault(tol -> 1E-6)
 
@@ -320,7 +327,7 @@ class LinearRegression(override val uid: String)
       objectiveHistory)
     model.setSummary(trainingSummary)
   }
-
+  @Since("1.3.0")
   override def copy(extra: ParamMap): LinearRegression = defaultCopy(extra)
 }
 
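The version spread here is worth noting: the class, setRegParam, and setMaxIter date to 1.3.0, elasticNetParam and tol to 1.4.0, and fitIntercept/standardization to 1.5.0. A hedged sketch of the elastic-net configuration these setters control; the training DataFrame with "features"/"label" columns is an assumption:

    import org.apache.spark.ml.regression.LinearRegression

    // training: DataFrame with "features" and "label" columns (assumed).
    val lr = new LinearRegression()
      .setRegParam(0.01)        // overall penalty strength; 0.0 (default) disables it
      .setElasticNetParam(0.5)  // mix: 0.0 = pure L2 (ridge), 1.0 = pure L1 (lasso)
      .setFitIntercept(true)
      .setMaxIter(100)
      .setTol(1e-6)
    val model = lr.fit(training)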

mllib/src/main/scala/org/apache/spark/ml/regression/RandomForestRegressor.scala

Lines changed: 18 additions & 16 deletions
@@ -17,7 +17,7 @@
 
 package org.apache.spark.ml.regression
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, Since}
 import org.apache.spark.ml.{PredictionModel, Predictor}
 import org.apache.spark.ml.param.ParamMap
 import org.apache.spark.ml.tree.{DecisionTreeModel, RandomForestParams, TreeEnsembleModel, TreeRegressorParams}
@@ -37,44 +37,45 @@ import org.apache.spark.sql.functions._
  * [[http://en.wikipedia.org/wiki/Random_forest Random Forest]] learning algorithm for regression.
  * It supports both continuous and categorical features.
  */
+@Since("1.4.0")
 @Experimental
-final class RandomForestRegressor(override val uid: String)
+final class RandomForestRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: String)
   extends Predictor[Vector, RandomForestRegressor, RandomForestRegressionModel]
   with RandomForestParams with TreeRegressorParams {
-
+  @Since("1.4.0")
   def this() = this(Identifiable.randomUID("rfr"))
 
   // Override parameter setters from parent trait for Java API compatibility.
 
   // Parameters from TreeRegressorParams:
-
+  @Since("1.4.0")
   override def setMaxDepth(value: Int): this.type = super.setMaxDepth(value)
-
+  @Since("1.4.0")
   override def setMaxBins(value: Int): this.type = super.setMaxBins(value)
-
+  @Since("1.4.0")
   override def setMinInstancesPerNode(value: Int): this.type =
     super.setMinInstancesPerNode(value)
-
+  @Since("1.4.0")
   override def setMinInfoGain(value: Double): this.type = super.setMinInfoGain(value)
-
+  @Since("1.4.0")
   override def setMaxMemoryInMB(value: Int): this.type = super.setMaxMemoryInMB(value)
-
+  @Since("1.4.0")
   override def setCacheNodeIds(value: Boolean): this.type = super.setCacheNodeIds(value)
-
+  @Since("1.4.0")
   override def setCheckpointInterval(value: Int): this.type = super.setCheckpointInterval(value)
-
+  @Since("1.4.0")
   override def setImpurity(value: String): this.type = super.setImpurity(value)
 
   // Parameters from TreeEnsembleParams:
-
+  @Since("1.4.0")
   override def setSubsamplingRate(value: Double): this.type = super.setSubsamplingRate(value)
-
+  @Since("1.4.0")
   override def setSeed(value: Long): this.type = super.setSeed(value)
 
   // Parameters from RandomForestParams:
-
+  @Since("1.4.0")
   override def setNumTrees(value: Int): this.type = super.setNumTrees(value)
-
+  @Since("1.4.0")
   override def setFeatureSubsetStrategy(value: String): this.type =
     super.setFeatureSubsetStrategy(value)
 
@@ -90,10 +91,11 @@ final class RandomForestRegressor(override val uid: String)
     val numFeatures = oldDataset.first().features.size
     new RandomForestRegressionModel(trees, numFeatures)
   }
-
+  @Since("1.4.0")
   override def copy(extra: ParamMap): RandomForestRegressor = defaultCopy(extra)
 }
 
+@Since("1.4.0")
 @Experimental
 object RandomForestRegressor {
   /** Accessor for supported impurity settings: variance */
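A hedged sketch tying the annotated setters together; the supportedImpurities name is assumed from the companion object's accessor comment above, and the training DataFrame is an assumption:

    import org.apache.spark.ml.regression.RandomForestRegressor

    // Regression forests only support "variance" impurity (name assumed):
    println(RandomForestRegressor.supportedImpurities.mkString(", "))

    // training: DataFrame with "features" and "label" columns (assumed).
    val rf = new RandomForestRegressor()
      .setNumTrees(100)                      // ensemble size
      .setFeatureSubsetStrategy("onethird")  // features considered at each split
      .setSubsamplingRate(0.8)               // fraction of data sampled per tree
      .setSeed(42L)                          // honored here, unlike in GBTRegressor
    val model = rf.fit(training)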
