Skip to content

Commit 30961ba

Browse files
committed with message:
adjust spaces in mllib/test
1 parent 571b5c5 commit 30961ba

File tree

15 files changed

+47
-29
lines changed

15 files changed

+47
-29
lines changed

mllib/src/test/scala/org/apache/spark/ml/feature/Word2VecSuite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -35,9 +35,9 @@ class Word2VecSuite extends FunSuite with MLlibTestSparkContext {
3535
val doc = sc.parallelize(Seq(sentence, sentence)).map(line => line.split(" "))
3636

3737
val codes = Map(
38-
"a" -> Array(-0.2811822295188904,-0.6356269121170044,-0.3020961284637451),
39-
"b" -> Array(1.0309048891067505,-1.29472815990448,0.22276712954044342),
40-
"c" -> Array(-0.08456747233867645,0.5137411952018738,0.11731560528278351)
38+
"a" -> Array(-0.2811822295188904, -0.6356269121170044, -0.3020961284637451),
39+
"b" -> Array(1.0309048891067505, -1.29472815990448, 0.22276712954044342),
40+
"c" -> Array(-0.08456747233867645, 0.5137411952018738, 0.11731560528278351)
4141
)
4242

4343
val expected = doc.map { sentence =>

mllib/src/test/scala/org/apache/spark/ml/tuning/CrossValidatorSuite.scala

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -90,14 +90,20 @@ object CrossValidatorSuite {
9090

9191
override def validateParams(): Unit = require($(inputCol).nonEmpty)
9292

93-
override def fit(dataset: DataFrame): MyModel = ???
93+
override def fit(dataset: DataFrame): MyModel = {
94+
throw new UnsupportedOperationException
95+
}
9496

95-
override def transformSchema(schema: StructType): StructType = ???
97+
override def transformSchema(schema: StructType): StructType = {
98+
throw new UnsupportedOperationException
99+
}
96100
}
97101

98102
class MyEvaluator extends Evaluator {
99103

100-
override def evaluate(dataset: DataFrame): Double = ???
104+
override def evaluate(dataset: DataFrame): Double = {
105+
throw new UnsupportedOperationException
106+
}
101107

102108
override val uid: String = "eval"
103109
}

mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ class PythonMLLibAPISuite extends FunSuite {
8484

8585
val smt = new SparseMatrix(
8686
3, 3, Array(0, 2, 3, 5), Array(0, 2, 1, 0, 2), Array(0.9, 1.2, 3.4, 5.7, 8.9),
87-
isTransposed=true)
87+
isTransposed = true)
8888
val nsmt = SerDe.loads(SerDe.dumps(smt)).asInstanceOf[SparseMatrix]
8989
assert(smt.toArray === nsmt.toArray)
9090
}

mllib/src/test/scala/org/apache/spark/mllib/classification/NaiveBayesSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -163,7 +163,7 @@ class NaiveBayesSuite extends FunSuite with MLlibTestSparkContext {
163163
val theta = Array(
164164
Array(0.50, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.40), // label 0
165165
Array(0.02, 0.70, 0.10, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02), // label 1
166-
Array(0.02, 0.02, 0.60, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.30) // label 2
166+
Array(0.02, 0.02, 0.60, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.30) // label 2
167167
).map(_.map(math.log))
168168

169169
val testData = NaiveBayesSuite.generateNaiveBayesInput(

mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ object SVMSuite {
4646
nPoints: Int,
4747
seed: Int): Seq[LabeledPoint] = {
4848
val rnd = new Random(seed)
49-
val weightsMat = new DoubleMatrix(1, weights.length, weights:_*)
49+
val weightsMat = new DoubleMatrix(1, weights.length, weights : _*)
5050
val x = Array.fill[Array[Double]](nPoints)(
5151
Array.fill[Double](weights.length)(rnd.nextDouble() * 2.0 - 1.0))
5252
val y = x.map { xi =>
@@ -91,7 +91,7 @@ class SVMSuite extends FunSuite with MLlibTestSparkContext {
9191
val model = svm.run(testRDD)
9292

9393
val validationData = SVMSuite.generateSVMInput(A, Array[Double](B, C), nPoints, 17)
94-
val validationRDD = sc.parallelize(validationData, 2)
94+
val validationRDD = sc.parallelize(validationData, 2)
9595

9696
// Test prediction on RDD.
9797

@@ -117,7 +117,7 @@ class SVMSuite extends FunSuite with MLlibTestSparkContext {
117117
val B = -1.5
118118
val C = 1.0
119119

120-
val testData = SVMSuite.generateSVMInput(A, Array[Double](B,C), nPoints, 42)
120+
val testData = SVMSuite.generateSVMInput(A, Array[Double](B, C), nPoints, 42)
121121

122122
val testRDD = sc.parallelize(testData, 2)
123123
testRDD.cache()
@@ -127,8 +127,8 @@ class SVMSuite extends FunSuite with MLlibTestSparkContext {
127127

128128
val model = svm.run(testRDD)
129129

130-
val validationData = SVMSuite.generateSVMInput(A, Array[Double](B,C), nPoints, 17)
131-
val validationRDD = sc.parallelize(validationData, 2)
130+
val validationData = SVMSuite.generateSVMInput(A, Array[Double](B, C), nPoints, 17)
131+
val validationRDD = sc.parallelize(validationData, 2)
132132

133133
// Test prediction on RDD.
134134
validatePrediction(model.predict(validationRDD.map(_.features)).collect(), validationData)
@@ -145,7 +145,7 @@ class SVMSuite extends FunSuite with MLlibTestSparkContext {
145145
val B = -1.5
146146
val C = 1.0
147147

148-
val testData = SVMSuite.generateSVMInput(A, Array[Double](B,C), nPoints, 42)
148+
val testData = SVMSuite.generateSVMInput(A, Array[Double](B, C), nPoints, 42)
149149

150150
val initialB = -1.0
151151
val initialC = -1.0
@@ -159,8 +159,8 @@ class SVMSuite extends FunSuite with MLlibTestSparkContext {
159159

160160
val model = svm.run(testRDD, initialWeights)
161161

162-
val validationData = SVMSuite.generateSVMInput(A, Array[Double](B,C), nPoints, 17)
163-
val validationRDD = sc.parallelize(validationData,2)
162+
val validationData = SVMSuite.generateSVMInput(A, Array[Double](B, C), nPoints, 17)
163+
val validationRDD = sc.parallelize(validationData, 2)
164164

165165
// Test prediction on RDD.
166166
validatePrediction(model.predict(validationRDD.map(_.features)).collect(), validationData)
@@ -177,7 +177,7 @@ class SVMSuite extends FunSuite with MLlibTestSparkContext {
177177
val B = -1.5
178178
val C = 1.0
179179

180-
val testData = SVMSuite.generateSVMInput(A, Array[Double](B,C), nPoints, 42)
180+
val testData = SVMSuite.generateSVMInput(A, Array[Double](B, C), nPoints, 42)
181181
val testRDD = sc.parallelize(testData, 2)
182182

183183
val testRDDInvalid = testRDD.map { lp =>

mllib/src/test/scala/org/apache/spark/mllib/clustering/KMeansSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ class KMeansSuite extends FunSuite with MLlibTestSparkContext {
7575
val center = Vectors.dense(1.0, 2.0, 3.0)
7676

7777
// Make sure code runs.
78-
var model = KMeans.train(data, k=2, maxIterations=1)
78+
var model = KMeans.train(data, k = 2, maxIterations = 1)
7979
assert(model.clusterCenters.size === 2)
8080
}
8181

@@ -87,7 +87,7 @@ class KMeansSuite extends FunSuite with MLlibTestSparkContext {
8787
2)
8888

8989
// Make sure code runs.
90-
var model = KMeans.train(data, k=3, maxIterations=1)
90+
var model = KMeans.train(data, k = 3, maxIterations = 1)
9191
assert(model.clusterCenters.size === 3)
9292
}
9393

mllib/src/test/scala/org/apache/spark/mllib/clustering/PowerIterationClusteringSuite.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -94,11 +94,13 @@ class PowerIterationClusteringSuite extends FunSuite with MLlibTestSparkContext
9494
*/
9595
val similarities = Seq[(Long, Long, Double)](
9696
(0, 1, 1.0), (0, 2, 1.0), (0, 3, 1.0), (1, 2, 1.0), (2, 3, 1.0))
97+
// scalastyle:off
9798
val expected = Array(
9899
Array(0.0, 1.0/3.0, 1.0/3.0, 1.0/3.0),
99100
Array(1.0/2.0, 0.0, 1.0/2.0, 0.0),
100101
Array(1.0/3.0, 1.0/3.0, 0.0, 1.0/3.0),
101102
Array(1.0/2.0, 0.0, 1.0/2.0, 0.0))
103+
// scalastyle:on
102104
val w = normalize(sc.parallelize(similarities, 2))
103105
w.edges.collect().foreach { case Edge(i, j, x) =>
104106
assert(x ~== expected(i.toInt)(j.toInt) absTol 1e-14)

mllib/src/test/scala/org/apache/spark/mllib/evaluation/RegressionMetricsSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ class RegressionMetricsSuite extends FunSuite with MLlibTestSparkContext {
2626

2727
test("regression metrics") {
2828
val predictionAndObservations = sc.parallelize(
29-
Seq((2.5,3.0),(0.0,-0.5),(2.0,2.0),(8.0,7.0)), 2)
29+
Seq((2.5, 3.0), (0.0, -0.5), (2.0, 2.0), (8.0, 7.0)), 2)
3030
val metrics = new RegressionMetrics(predictionAndObservations)
3131
assert(metrics.explainedVariance ~== 0.95717 absTol 1E-5,
3232
"explained variance regression score mismatch")
@@ -39,7 +39,7 @@ class RegressionMetricsSuite extends FunSuite with MLlibTestSparkContext {
3939

4040
test("regression metrics with complete fitting") {
4141
val predictionAndObservations = sc.parallelize(
42-
Seq((3.0,3.0),(0.0,0.0),(2.0,2.0),(8.0,8.0)), 2)
42+
Seq((3.0, 3.0), (0.0, 0.0), (2.0, 2.0), (8.0, 8.0)), 2)
4343
val metrics = new RegressionMetrics(predictionAndObservations)
4444
assert(metrics.explainedVariance ~== 1.0 absTol 1E-5,
4545
"explained variance regression score mismatch")

mllib/src/test/scala/org/apache/spark/mllib/feature/StandardScalerSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -360,7 +360,7 @@ class StandardScalerSuite extends FunSuite with MLlibTestSparkContext {
360360
}
361361
withClue("model needs std and mean vectors to be equal size when both are provided") {
362362
intercept[IllegalArgumentException] {
363-
val model = new StandardScalerModel(Vectors.dense(0.0), Vectors.dense(0.0,1.0))
363+
val model = new StandardScalerModel(Vectors.dense(0.0), Vectors.dense(0.0, 1.0))
364364
}
365365
}
366366
}

mllib/src/test/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrixSuite.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -57,11 +57,13 @@ class BlockMatrixSuite extends FunSuite with MLlibTestSparkContext {
5757
val random = new ju.Random()
5858
// This should generate a 4x4 grid of 1x2 blocks.
5959
val part0 = GridPartitioner(4, 7, suggestedNumPartitions = 12)
60+
// scalastyle:off
6061
val expected0 = Array(
6162
Array(0, 0, 4, 4, 8, 8, 12),
6263
Array(1, 1, 5, 5, 9, 9, 13),
6364
Array(2, 2, 6, 6, 10, 10, 14),
6465
Array(3, 3, 7, 7, 11, 11, 15))
66+
// scalastyle:on
6567
for (i <- 0 until 4; j <- 0 until 7) {
6668
assert(part0.getPartition((i, j)) === expected0(i)(j))
6769
assert(part0.getPartition((i, j, random.nextInt())) === expected0(i)(j))

0 commit comments

Comments (0)