Skip to content

Commit a97e358

Browse files
committed
address comments
1 parent 052ad6b commit a97e358

File tree

2 files changed

+9
-9
lines changed
  • sql
    • catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical
    • core/src/main/scala/org/apache/spark/sql/execution

2 files changed

+9
-9
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/object.scala

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -94,7 +94,7 @@ case class DeserializeToObject(
9494
*/
9595
case class SerializeFromObject(
9696
serializer: Seq[NamedExpression],
97-
child: LogicalPlan) extends UnaryNode with ObjectConsumer {
97+
child: LogicalPlan) extends ObjectConsumer {
9898

9999
override def output: Seq[Attribute] = serializer.map(_.toAttribute)
100100
}
@@ -118,7 +118,7 @@ object MapPartitions {
118118
case class MapPartitions(
119119
func: Iterator[Any] => Iterator[Any],
120120
outputObjAttr: Attribute,
121-
child: LogicalPlan) extends UnaryNode with ObjectConsumer with ObjectProducer
121+
child: LogicalPlan) extends ObjectConsumer with ObjectProducer
122122

123123
object MapPartitionsInR {
124124
def apply(
@@ -152,7 +152,7 @@ case class MapPartitionsInR(
152152
inputSchema: StructType,
153153
outputSchema: StructType,
154154
outputObjAttr: Attribute,
155-
child: LogicalPlan) extends UnaryNode with ObjectConsumer with ObjectProducer {
155+
child: LogicalPlan) extends ObjectConsumer with ObjectProducer {
156156
override lazy val schema = outputSchema
157157
}
158158

@@ -175,7 +175,7 @@ object MapElements {
175175
case class MapElements(
176176
func: AnyRef,
177177
outputObjAttr: Attribute,
178-
child: LogicalPlan) extends UnaryNode with ObjectConsumer with ObjectProducer
178+
child: LogicalPlan) extends ObjectConsumer with ObjectProducer
179179

180180
/** Factory for constructing new `AppendColumn` nodes. */
181181
object AppendColumns {
@@ -215,7 +215,7 @@ case class AppendColumnsWithObject(
215215
func: Any => Any,
216216
childSerializer: Seq[NamedExpression],
217217
newColumnsSerializer: Seq[NamedExpression],
218-
child: LogicalPlan) extends UnaryNode with ObjectConsumer {
218+
child: LogicalPlan) extends ObjectConsumer {
219219

220220
override def output: Seq[Attribute] = (childSerializer ++ newColumnsSerializer).map(_.toAttribute)
221221
}

sql/core/src/main/scala/org/apache/spark/sql/execution/objects.scala

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -94,7 +94,7 @@ case class DeserializeToObjectExec(
9494
*/
9595
case class SerializeFromObjectExec(
9696
serializer: Seq[NamedExpression],
97-
child: SparkPlan) extends UnaryExecNode with ObjectConsumerExec with CodegenSupport {
97+
child: SparkPlan) extends ObjectConsumerExec with CodegenSupport {
9898

9999
override def output: Seq[Attribute] = serializer.map(_.toAttribute)
100100

@@ -165,7 +165,7 @@ case class MapPartitionsExec(
165165
func: Iterator[Any] => Iterator[Any],
166166
outputObjAttr: Attribute,
167167
child: SparkPlan)
168-
extends UnaryExecNode with ObjectProducerExec with ObjectConsumerExec {
168+
extends ObjectConsumerExec with ObjectProducerExec {
169169

170170
override protected def doExecute(): RDD[InternalRow] = {
171171
child.execute().mapPartitionsInternal { iter =>
@@ -187,7 +187,7 @@ case class MapElementsExec(
187187
func: AnyRef,
188188
outputObjAttr: Attribute,
189189
child: SparkPlan)
190-
extends UnaryExecNode with ObjectProducerExec with ObjectConsumerExec with CodegenSupport {
190+
extends ObjectConsumerExec with ObjectProducerExec with CodegenSupport {
191191

192192
override def inputRDDs(): Seq[RDD[InternalRow]] = {
193193
child.asInstanceOf[CodegenSupport].inputRDDs()
@@ -264,7 +264,7 @@ case class AppendColumnsWithObjectExec(
264264
func: Any => Any,
265265
inputSerializer: Seq[NamedExpression],
266266
newColumnsSerializer: Seq[NamedExpression],
267-
child: SparkPlan) extends UnaryExecNode with ObjectConsumerExec {
267+
child: SparkPlan) extends ObjectConsumerExec {
268268

269269
override def output: Seq[Attribute] = (inputSerializer ++ newColumnsSerializer).map(_.toAttribute)
270270

0 commit comments

Comments (0)