1 file changed: +4 −4 lines

sql/core/src/main/scala/org/apache/spark/sql/execution

@@ -168,20 +168,20 @@ case class TakeOrderedAndProject(
   private val projection = projectList.map(newProjection(_, child.output))

-  private def collectData(): Iterator[InternalRow] = {
-    val data = child.execute().map(_.copy()).takeOrdered(limit)(ord).toIterator
+  private def collectData(): Array[InternalRow] = {
+    val data = child.execute().map(_.copy()).takeOrdered(limit)(ord)
     projection.map(data.map(_)).getOrElse(data)
   }

   override def executeCollect(): Array[Row] = {
     val converter = CatalystTypeConverters.createToScalaConverter(schema)
-    collectData().map(converter(_).asInstanceOf[Row]).toArray
+    collectData().map(converter(_).asInstanceOf[Row])
   }

   // TODO: Terminal split should be implemented differently from non-terminal split.
   // TODO: Pick num splits based on |limit|.
   protected override def doExecute(): RDD[InternalRow] =
-    sparkContext.makeRDD(collectData().toArray[InternalRow], 1)
+    sparkContext.makeRDD(collectData(), 1)

   override def outputOrdering: Seq[SortOrder] = sortOrder
 }
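For context, the change works because RDD.takeOrdered already returns an Array, so collectData() can hand that array straight to its callers: mapping over an Array in executeCollect() yields an Array[Row] without a trailing .toArray, and doExecute() can pass the result directly to sparkContext.makeRDD. Below is a minimal, self-contained sketch of that pattern; the object and value names are illustrative only (not part of the patch), and it assumes a local spark-core dependency rather than the patched Spark SQL source.

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical demo object; names are illustrative, not taken from the patch.
object TakeOrderedSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("takeOrdered-sketch").setMaster("local[2]"))

    // takeOrdered(limit)(ord) returns an Array, not an Iterator,
    // so no .toIterator / .toArray round trip is needed.
    val top3: Array[Int] = sc.parallelize(Seq(5, 1, 4, 2, 3)).takeOrdered(3)(Ordering.Int)
    println(top3.mkString(", "))  // prints: 1, 2, 3

    // The same array can be fed straight into makeRDD to build a
    // single-partition RDD, mirroring doExecute() in the diff above.
    val singlePartition = sc.makeRDD(top3, 1)
    println(singlePartition.count())  // prints: 3

    sc.stop()
  }
}
```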