@@ -21,6 +21,7 @@ import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.physical.Partitioning
 import org.apache.spark.sql.catalyst.rules.Rule
 
 /**
@@ -33,6 +34,8 @@ case class ConvertToUnsafe(child: SparkPlan) extends UnaryNode {
   require(UnsafeProjection.canSupport(child.schema), s"Cannot convert ${child.schema} to Unsafe")
 
   override def output: Seq[Attribute] = child.output
+  override def outputPartitioning: Partitioning = child.outputPartitioning
+  override def outputOrdering: Seq[SortOrder] = child.outputOrdering
   override def outputsUnsafeRows: Boolean = true
   override def canProcessUnsafeRows: Boolean = false
   override def canProcessSafeRows: Boolean = true
@@ -51,6 +54,8 @@ case class ConvertToUnsafe(child: SparkPlan) extends UnaryNode {
 @DeveloperApi
 case class ConvertToSafe(child: SparkPlan) extends UnaryNode {
   override def output: Seq[Attribute] = child.output
+  override def outputPartitioning: Partitioning = child.outputPartitioning
+  override def outputOrdering: Seq[SortOrder] = child.outputOrdering
   override def outputsUnsafeRows: Boolean = false
   override def canProcessUnsafeRows: Boolean = true
   override def canProcessSafeRows: Boolean = false
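Context for the change: both converters only re-encode rows between the safe and unsafe formats; they do not redistribute or reorder data, so propagating the child's outputPartitioning and outputOrdering lets the planner see that distribution and sort order are preserved, rather than falling back to the base-class defaults and potentially inserting a redundant exchange or sort. The sketch below illustrates that pass-through pattern with simplified stand-in types (PhysicalOp, SortedScan, ConvertFormat and the rest are hypothetical names for illustration, not the real SparkPlan hierarchy).

// Minimal, self-contained sketch of property propagation (hypothetical types,
// not the actual Spark internals).
sealed trait Partitioning
case object UnknownPartitioning extends Partitioning
case class HashPartitioning(columns: Seq[String], numPartitions: Int) extends Partitioning

case class SortOrder(column: String, ascending: Boolean)

abstract class PhysicalOp {
  // Conservative defaults: unless an operator overrides these, the planner must
  // assume its output has no known distribution or ordering.
  def outputPartitioning: Partitioning = UnknownPartitioning
  def outputOrdering: Seq[SortOrder] = Nil
}

// An operator whose output is hash partitioned and sorted on "id".
case class SortedScan() extends PhysicalOp {
  override def outputPartitioning: Partitioning = HashPartitioning(Seq("id"), 200)
  override def outputOrdering: Seq[SortOrder] = Seq(SortOrder("id", ascending = true))
}

// A pure row-format conversion: it changes how rows are encoded, not how data is
// distributed or sorted, so it passes both physical properties through to its parent.
case class ConvertFormat(child: PhysicalOp) extends PhysicalOp {
  override def outputPartitioning: Partitioning = child.outputPartitioning
  override def outputOrdering: Seq[SortOrder] = child.outputOrdering
}

object PropagationDemo extends App {
  val plan = ConvertFormat(SortedScan())
  // With the overrides the conversion is transparent; without them this would
  // report UnknownPartitioning / Nil and invite an unnecessary shuffle or sort.
  println(plan.outputPartitioning)  // HashPartitioning(List(id),200)
  println(plan.outputOrdering)      // List(SortOrder(id,true))
}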