Skip to content

Commit 9146e19

Browse files
Author: DB Tsai (committed)
Commit message: initial commit
1 parent 6c65da6 commit 9146e19

File tree

1 file changed: 5 additions, 12 deletions

1 file changed: 5 additions, 12 deletions

mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala

Lines changed: 5 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,7 @@ import breeze.optimize.{CachedDiffFunction, DiffFunction}
2525

2626
import org.apache.spark.annotation.AlphaComponent
2727
import org.apache.spark.ml.param.{Params, ParamMap}
28-
import org.apache.spark.ml.param.shared.{HasTol, HasElasticNetParam, HasMaxIter,
29-
HasRegParam}
28+
import org.apache.spark.ml.param.shared.{HasTol, HasElasticNetParam, HasMaxIter, HasRegParam}
3029
import org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
3130
import org.apache.spark.mllib.linalg.{Vector, Vectors}
3231
import org.apache.spark.mllib.linalg.BLAS._
@@ -103,9 +102,7 @@ class LinearRegression extends Regressor[Vector, LinearRegression, LinearRegress
103102
case LabeledPoint(label: Double, features: Vector) => (label, features)
104103
}
105104
val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
106-
if (handlePersistence) {
107-
instances.persist(StorageLevel.MEMORY_AND_DISK)
108-
}
105+
if (handlePersistence) instances.persist(StorageLevel.MEMORY_AND_DISK)
109106

110107
val (summarizer, statCounter) = instances.treeAggregate(
111108
(new MultivariateOnlineSummarizer, new StatCounter))( {
@@ -146,8 +143,7 @@ class LinearRegression extends Regressor[Vector, LinearRegression, LinearRegress
146143
val optimizer = if (paramMap(elasticNetParam) == 0.0 || effectiveRegParam == 0.0) {
147144
new BreezeLBFGS[BDV[Double]](paramMap(maxIter), 10, paramMap(tol))
148145
} else {
149-
new BreezeOWLQN[Int, BDV[Double]](paramMap(maxIter), 10, effectiveL1RegParam,
150-
paramMap(tol))
146+
new BreezeOWLQN[Int, BDV[Double]](paramMap(maxIter), 10, effectiveL1RegParam, paramMap(tol))
151147
}
152148

153149
val initialWeights = Vectors.zeros(numFeatures)
@@ -304,9 +300,8 @@ private class LeastSquaresAggregator(
304300
featuresStd: Array[Double],
305301
featuresMean: Array[Double]) extends Serializable {
306302

307-
private var totalCnt: Long = 0
303+
private var totalCnt: Long = 0L
308304
private var lossSum = 0.0
309-
private var diffSum = 0.0
310305

311306
private val (effectiveWeightsArray: Array[Double], offset: Double, dim: Int) = {
312307
val weightsArray = weights.toArray.clone()
@@ -325,7 +320,7 @@ private class LeastSquaresAggregator(
325320
}
326321
private val effectiveWeightsVector = Vectors.dense(effectiveWeightsArray)
327322

328-
private val gradientSumArray: Array[Double] = Array.ofDim[Double](dim)
323+
private val gradientSumArray = Array.ofDim[Double](dim)
329324

330325
/**
331326
* Add a new training data to this LeastSquaresAggregator, and update the loss and gradient
@@ -350,7 +345,6 @@ private class LeastSquaresAggregator(
350345
}
351346
}
352347
lossSum += diff * diff / 2.0
353-
diffSum += diff
354348
}
355349

356350
totalCnt += 1
@@ -372,7 +366,6 @@ private class LeastSquaresAggregator(
372366
if (other.totalCnt != 0) {
373367
totalCnt += other.totalCnt
374368
lossSum += other.lossSum
375-
diffSum += other.diffSum
376369

377370
var i = 0
378371
val localThisGradientSumArray = this.gradientSumArray

0 commit comments

Comments (0)