@@ -17,10 +17,10 @@
 
 package org.apache.spark.examples.mllib
 
+import org.apache.spark.SparkConf
+import org.apache.spark.mllib.clustering.StreamingKMeans
 import org.apache.spark.mllib.linalg.Vectors
 import org.apache.spark.mllib.regression.LabeledPoint
-import org.apache.spark.mllib.clustering.StreamingKMeans
-import org.apache.spark.SparkConf
 import org.apache.spark.streaming.{Seconds, StreamingContext}
 
 /**
@@ -36,28 +36,28 @@ import org.apache.spark.streaming.{Seconds, StreamingContext}
  * `(y,[x1,x2,x3,...,xn])`
  * Where y is some identifier. n must be the same for train and test.
  *
- * Usage: StreamingKmeans <trainingDir> <testDir> <batchDuration> <numClusters> <numDimensions>
+ * Usage:
+ *   StreamingKMeansExample <trainingDir> <testDir> <batchDuration> <numClusters> <numDimensions>
  *
  * To run on your local machine using the two directories `trainingDir` and `testDir`,
  * with updates every 5 seconds, 2 dimensions per data point, and 3 clusters, call:
- *   $ bin/run-example \
- *     org.apache.spark.examples.mllib.StreamingKMeans trainingDir testDir 5 3 2
+ *   $ bin/run-example mllib.StreamingKMeansExample trainingDir testDir 5 3 2
  *
  * As you add text files to `trainingDir` the clusters will continuously update.
  * Anytime you add text files to `testDir`, you'll see predicted labels using the current model.
  *
  */
-object StreamingKMeans {
+object StreamingKMeansExample {
 
   def main(args: Array[String]) {
     if (args.length != 5) {
       System.err.println(
-        "Usage: StreamingKMeans " +
+        "Usage: StreamingKMeansExample " +
           "<trainingDir> <testDir> <batchDuration> <numClusters> <numDimensions>")
       System.exit(1)
     }
 
-    val conf = new SparkConf().setMaster("local").setAppName("StreamingLinearRegression")
+    val conf = new SparkConf().setMaster("local").setAppName("StreamingKMeansExample")
     val ssc = new StreamingContext(conf, Seconds(args(2).toLong))
 
     val trainingData = ssc.textFileStream(args(0)).map(Vectors.parse)
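The diff view cuts off at the training stream. Based on the standard StreamingKMeans API from org.apache.spark.mllib.clustering (setK, setDecayFactor, setRandomCenters, trainOn, predictOnValues), the body of main presumably continues roughly as sketched below; the decay factor of 1.0 and the initial center weight of 0.0 are illustrative assumptions, not part of this commit.

    // Test points arrive as labeled points of the form (y,[x1,x2,...,xn]).
    val testData = ssc.textFileStream(args(1)).map(LabeledPoint.parse)

    // Build a streaming k-means model: <numClusters> clusters over
    // <numDimensions>-dimensional points, starting from random centers.
    val model = new StreamingKMeans()
      .setK(args(3).toInt)
      .setDecayFactor(1.0)                   // assumed: weight all batches equally
      .setRandomCenters(args(4).toInt, 0.0)  // assumed initial center weight

    // Update the centers on each training batch and print the predicted
    // cluster index for every labeled test point.
    model.trainOn(trainingData)
    model.predictOnValues(testData.map(lp => (lp.label, lp.features))).print()

    ssc.start()
    ssc.awaitTermination()
  }
}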