
Commit f10574e

Fix a validation problem caused by Scala collections' implicit type conversion
1 parent 7237c36 commit f10574e

File tree

1 file changed: +5 -6 lines


mllib/src/main/scala/org/apache/spark/mllib/api/python/LDAModelWrapper.scala

Lines changed: 5 additions & 6 deletions
@@ -16,6 +16,8 @@
  */
 package org.apache.spark.mllib.api.python
 
+import scala.collection.JavaConverters
+
 import org.apache.spark.SparkContext
 import org.apache.spark.mllib.clustering.LDAModel
 import org.apache.spark.mllib.linalg.Matrix
@@ -32,14 +34,11 @@ private[python] class LDAModelWrapper(model: LDAModel) {
   def describeTopics(): java.util.List[Array[Any]] = describeTopics(this.model.vocabSize)
 
   def describeTopics(maxTermsPerTopic: Int): java.util.List[Array[Any]] = {
-    import scala.collection.JavaConversions._
 
-    val javaList: java.util.List[Array[Any]] =
-      model.describeTopics(maxTermsPerTopic).map { case (terms, termWeights) =>
-        var array = Array.empty[Any]
+    val seq = model.describeTopics(maxTermsPerTopic).map { case (terms, termWeights) =>
       Array.empty[Any] ++ terms ++ termWeights
-      }.toList
-    javaList
+    }.toSeq
+    JavaConverters.seqAsJavaListConverter(seq).asJava
   }
 
   def save(sc: SparkContext, path: String): Unit = model.save(sc, path)
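For reference, here is a minimal, self-contained sketch of the explicit-conversion idiom the commit switches to: build a Scala Seq, then convert it to a java.util.List with scala.collection.JavaConverters instead of relying on the wildcard implicits from scala.collection.JavaConversions._, which can silently coerce to an unintended collection type. The object name ConversionSketch and the sample data are illustrative assumptions, not part of the Spark codebase.

```scala
import scala.collection.JavaConverters

object ConversionSketch {
  // Convert a Scala Seq to a java.util.List explicitly. The conversion is
  // visible at the call site, so no implicit can kick in and hand back a
  // Scala-backed type where a genuine java.util.List is expected.
  def toJavaList(rows: Seq[Array[Any]]): java.util.List[Array[Any]] =
    JavaConverters.seqAsJavaListConverter(rows).asJava

  def main(args: Array[String]): Unit = {
    // One (term, weight) row, mirroring the Array[Any] shape used by describeTopics.
    val rows: Seq[Array[Any]] = Seq(Array[Any]("topicTerm", 0.42))
    val jList: java.util.List[Array[Any]] = toJavaList(rows)
    println(jList.get(0).mkString(", ")) // topicTerm, 0.42
  }
}
```

Compared with importing JavaConversions._, the explicit converter keeps the Scala-to-Java boundary visible in the code, which is what avoids the validation problem described in the commit message.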
