1 file changed: 0 additions, 20 deletions
mllib/src/test/scala/org/apache/spark/ml/tree/impl
@@ -237,26 +237,6 @@ private[ml] object TreeTests extends SparkFunSuite {
       new LabeledPoint(14.0, Vectors.dense(Array(5.0)))
     ))
 
-  /**
-   * Create toy data that can be used for testing deep tree training; the generated data requires
-   * [[depth]] splits to split fully. Thus a tree fit on the generated data should have a depth of
-   * [[depth]] (unless splitting halts early due to other constraints, e.g. max depth or min
-   * info gain).
-   */
-  def deepTreeData(sc: SparkContext, depth: Int): RDD[LabeledPoint] = {
-    // Create a dataset with [[depth]] binary features; a training point has a label of 1
-    // iff all features have a value of 1.
-    sc.parallelize(Range(0, depth + 1).map { idx =>
-      val features = Array.fill[Double](depth)(1)
-      if (idx == depth) {
-        LabeledPoint(1.0, Vectors.dense(features))
-      } else {
-        features(idx) = 0.0
-        LabeledPoint(0.0, Vectors.dense(features))
-      }
-    })
-  }
-
   /**
    * Mapping from all Params to valid settings which differ from the defaults.
    * This is useful for tests which need to exercise all Params, such as save/load.
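For context, here is a minimal sketch (not part of the change) of the data-generation logic that the removed deepTreeData helper implemented, written as a plain local function so it can run without a SparkContext. The name localDeepTreeData is hypothetical, and the imports assume the ml.feature.LabeledPoint / ml.linalg.Vectors types that the surrounding test code appears to use. For depth = 3 it produces (0.0, [0,1,1]), (0.0, [1,0,1]), (0.0, [1,1,0]), (1.0, [1,1,1]), so a fitted tree needs to split on all three features to separate the labels.

import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.linalg.Vectors

object DeepTreeDataSketch {
  // One point per feature index plus a final all-ones point; the label is 1.0
  // only when every feature equals 1.0, so fully fitting the data takes `depth` splits.
  def localDeepTreeData(depth: Int): Seq[LabeledPoint] = {
    Range(0, depth + 1).map { idx =>
      val features = Array.fill[Double](depth)(1.0)
      if (idx == depth) {
        LabeledPoint(1.0, Vectors.dense(features))
      } else {
        features(idx) = 0.0
        LabeledPoint(0.0, Vectors.dense(features))
      }
    }
  }

  def main(args: Array[String]): Unit = {
    // For depth = 3: (0.0,[0,1,1]), (0.0,[1,0,1]), (0.0,[1,1,0]), (1.0,[1,1,1])
    localDeepTreeData(3).foreach(println)
  }
}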