@@ -258,7 +258,7 @@ final class OnlineLDAOptimizer extends LDAOptimizer {
   private var tau0: Double = 1024
   private var kappa: Double = 0.51
   private var miniBatchFraction: Double = 0.05
-  private var optimizeAlpha: Boolean = false
+  private var optimizeDocConcentration: Boolean = false
 
   // internal data structure
   private var docs: RDD[(Long, Vector)] = null
@@ -335,20 +335,20 @@ final class OnlineLDAOptimizer extends LDAOptimizer {
   }
 
   /**
-   * Optimize alpha, indicates whether alpha (Dirichlet parameter for document-topic distribution)
-   * will be optimized during training.
+   * Optimize docConcentration, indicates whether docConcentration (Dirichlet parameter for
+   * document-topic distribution) will be optimized during training.
    */
   @Since("1.5.0")
-  def getOptimzeAlpha: Boolean = this.optimizeAlpha
+  def getOptimizeDocConcentration: Boolean = this.optimizeDocConcentration
 
   /**
-   * Sets whether to optimize alpha parameter during training.
+   * Sets whether to optimize docConcentration parameter during training.
    *
    * Default: false
    */
   @Since("1.5.0")
-  def setOptimzeAlpha(optimizeAlpha: Boolean): this.type = {
-    this.optimizeAlpha = optimizeAlpha
+  def setOptimizeDocConcentration(optimizeDocConcentration: Boolean): this.type = {
+    this.optimizeDocConcentration = optimizeDocConcentration
     this
   }
 
@@ -458,7 +458,7 @@ final class OnlineLDAOptimizer extends LDAOptimizer {
 
     // Note that this is an optimization to avoid batch.count
     updateLambda(batchResult, (miniBatchFraction * corpusSize).ceil.toInt)
-    if (optimizeAlpha) updateAlpha(gammat)
+    if (optimizeDocConcentration) updateAlpha(gammat)
     this
   }
 
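For context, a minimal sketch of how the renamed setter might be wired into a spark.mllib LDA run; the tiny corpus and the existing SparkContext `sc` are illustrative assumptions, not part of this change:

import org.apache.spark.mllib.clustering.{LDA, OnlineLDAOptimizer}
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.rdd.RDD

// Illustrative corpus of (docId, termCountVector) pairs; `sc` is assumed to be an existing SparkContext.
val corpus: RDD[(Long, Vector)] = sc.parallelize(Seq(
  (0L, Vectors.dense(1.0, 0.0, 3.0)),
  (1L, Vectors.dense(0.0, 2.0, 1.0))
))

// Configure the online optimizer using the renamed setter.
val optimizer = new OnlineLDAOptimizer()
  .setMiniBatchFraction(0.05)
  .setOptimizeDocConcentration(true)  // previously setOptimzeAlpha

val model = new LDA()
  .setK(2)
  .setOptimizer(optimizer)
  .run(corpus)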