Skip to content

Commit d05866c

Browse files
author
Davies Liu
committed
fix test: rollback changes for pyspark
1 parent 72878dd commit d05866c

File tree

2 files changed: +4 additions, -4 deletions

sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1501,8 +1501,8 @@ class DataFrame private[sql](
     */
    protected[sql] def javaToPython: JavaRDD[Array[Byte]] = {
      val fieldTypes = schema.fields.map(_.dataType)
-     val rdd = queryExecution.executedPlan.execute().map(EvaluatePython.rowToArray(_, fieldTypes))
-     SerDeUtil.javaToPython(rdd.toJavaRDD())
+     val jrdd = rdd.map(EvaluatePython.rowToArray(_, fieldTypes)).toJavaRDD()
+     SerDeUtil.javaToPython(jrdd)
    }

15081508
////////////////////////////////////////////////////////////////////////////

sql/core/src/main/scala/org/apache/spark/sql/execution/pythonUdfs.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -157,9 +157,9 @@ object EvaluatePython {
   }

   /**
-   * Convert InternalRow into Java Array (for pickled into Python)
+   * Convert Row into Java Array (for pickled into Python)
    */
-  def rowToArray(row: InternalRow, fields: Seq[DataType]): Array[Any] = {
+  def rowToArray(row: Row, fields: Seq[DataType]): Array[Any] = {
     // TODO: this is slow!
     row.toSeq.zip(fields).map {case (obj, dt) => toJava(obj, dt)}.toArray
   }

0 commit comments

Comments (0)