Skip to content

Commit 0675956

Browse files
committed
Preserving ordering and partitioning in row format converters also does not help.
1 parent cc5669c commit 0675956

File tree

1 file changed: +5 additions, −0 deletions

sql/core/src/main/scala/org/apache/spark/sql/execution/rowFormatConverters.scala

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -21,6 +21,7 @@ import org.apache.spark.annotation.DeveloperApi
21 21    import org.apache.spark.rdd.RDD
22 22    import org.apache.spark.sql.catalyst.InternalRow
23 23    import org.apache.spark.sql.catalyst.expressions._
   24 +  import org.apache.spark.sql.catalyst.plans.physical.Partitioning
24 25    import org.apache.spark.sql.catalyst.rules.Rule
25 26
26 27    /**
@@ -33,6 +34,8 @@ case class ConvertToUnsafe(child: SparkPlan) extends UnaryNode {
33 34    require(UnsafeProjection.canSupport(child.schema), s"Cannot convert ${child.schema} to Unsafe")
34 35
35 36    override def output: Seq[Attribute] = child.output
   37 +  override def outputPartitioning: Partitioning = child.outputPartitioning
   38 +  override def outputOrdering: Seq[SortOrder] = child.outputOrdering
36 39    override def outputsUnsafeRows: Boolean = true
37 40    override def canProcessUnsafeRows: Boolean = false
38 41    override def canProcessSafeRows: Boolean = true
@@ -51,6 +54,8 @@ case class ConvertToUnsafe(child: SparkPlan) extends UnaryNode {
51 54    @DeveloperApi
52 55    case class ConvertToSafe(child: SparkPlan) extends UnaryNode {
53 56    override def output: Seq[Attribute] = child.output
   57 +  override def outputPartitioning: Partitioning = child.outputPartitioning
   58 +  override def outputOrdering: Seq[SortOrder] = child.outputOrdering
54 59    override def outputsUnsafeRows: Boolean = false
55 60    override def canProcessUnsafeRows: Boolean = true
56 61    override def canProcessSafeRows: Boolean = false

0 commit comments

Comments (0)