@@ -180,7 +180,7 @@ object MLUtils {
180180 }
181181
182182 /**
183- * Loads vectors saved using `RDD[Vector]#saveAsTextFile`.
183+ * Loads vectors saved using `RDD[Vector].saveAsTextFile`.
184184 * @param sc Spark context
185185 * @param path file or directory path in any Hadoop-supported file system URI
186186 * @param minPartitions min number of partitions
@@ -190,13 +190,13 @@ object MLUtils {
190190 sc.textFile(path, minPartitions).map(Vectors.parse)
191191
192192 /**
193- * Loads vectors saved using `RDD[Vector]#saveAsTextFile` with the default number of partitions.
193+ * Loads vectors saved using `RDD[Vector].saveAsTextFile` with the default number of partitions.
194194 */
195195 def loadVectors(sc: SparkContext, path: String): RDD[Vector] =
196196 sc.textFile(path, sc.defaultMinPartitions).map(Vectors.parse)
197197
198198 /**
199- * Loads labeled points saved using `RDD[LabeledPoint]#saveAsTextFile`.
199+ * Loads labeled points saved using `RDD[LabeledPoint].saveAsTextFile`.
200200 * @param sc Spark context
201201 * @param path file or directory path in any Hadoop-supported file system URI
202202 * @param minPartitions min number of partitions
@@ -206,7 +206,7 @@ object MLUtils {
206206 sc.textFile(path, minPartitions).map(LabeledPoint.parse)
207207
208208 /**
209- * Loads labeled points saved using `RDD[LabeledPoint]#saveAsTextFile` with the default number of
209+ * Loads labeled points saved using `RDD[LabeledPoint].saveAsTextFile` with the default number of
210210 * partitions.
211211 */
212212 def loadLabeledPoints(sc: SparkContext, dir: String): RDD[LabeledPoint] =
@@ -225,7 +225,7 @@ object MLUtils {
225225 * @deprecated Should use [[org.apache.spark.rdd.RDD#saveAsTextFile]] for saving and
226226 * [[org.apache.spark.mllib.util.MLUtils#loadLabeledPoints]] for loading.
227227 */
228- @deprecated("Should use RDD#saveAsTextFile and MLUtils#loadLabeledPoints instead.", "1.0")
228+ @deprecated("Should use MLUtils.loadLabeledPoints instead.", "1.0")
229229 def loadLabeledData(sc: SparkContext, dir: String): RDD[LabeledPoint] = {
230230 sc.textFile(dir).map { line =>
231231 val parts = line.split(',')
@@ -246,7 +246,7 @@ object MLUtils {
246246 * @deprecated Should use [[org.apache.spark.rdd.RDD#saveAsTextFile]] for saving and
247247 * [[org.apache.spark.mllib.util.MLUtils#loadLabeledPoints]] for loading.
248248 */
249- @deprecated("Should use RDD#saveAsTextFile and MLUtils#loadLabeledPoints instead.", "1.0")
249+ @deprecated("Should use RDD[LabeledPoint].saveAsTextFile instead.", "1.0")
250250 def saveLabeledData(data: RDD[LabeledPoint], dir: String) {
251251 val dataStr = data.map(x => x.label + "," + x.features.toArray.mkString(" "))
252252 dataStr.saveAsTextFile(dir)
0 commit comments