
Commit 5807e35

renamed CoalescePartitions to Repartition; the old expression-based Repartition is now RepartitionByExpression
1 parent fa4509f

6 files changed: 11 additions, 11 deletions

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala

Lines changed: 1 addition & 1 deletion
@@ -310,7 +310,7 @@ case class Distinct(child: LogicalPlan) extends UnaryNode {
   override def output: Seq[Attribute] = child.output
 }
 
-case class CoalescePartitions(numPartitions: Int, shuffle: Boolean, child: LogicalPlan)
+case class Repartition(numPartitions: Int, shuffle: Boolean, child: LogicalPlan)
   extends UnaryNode {
   override def output: Seq[Attribute] = child.output
 }

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala

Lines changed: 1 addition & 1 deletion
@@ -32,5 +32,5 @@ abstract class RedistributeData extends UnaryNode {
 case class SortPartitions(sortExpressions: Seq[SortOrder], child: LogicalPlan)
   extends RedistributeData
 
-case class Repartition(partitionExpressions: Seq[Expression], child: LogicalPlan)
+case class RepartitionByExpression(partitionExpressions: Seq[Expression], child: LogicalPlan)
   extends RedistributeData
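
With both renames in place, the two logical nodes read side by side as follows. A minimal sketch for contrast; the helper function, its `plan` child, and the `keyExpr` partitioning expression are hypothetical, and only the case-class signatures come from the diffs above:

import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Repartition, RepartitionByExpression}

// Hypothetical helper: builds one node of each kind from an existing child plan.
def illustrateRename(plan: LogicalPlan, keyExpr: Expression): (LogicalPlan, LogicalPlan) = {
  // Count-based redistribution: a target partition count plus a shuffle flag.
  val byCount = Repartition(numPartitions = 8, shuffle = true, child = plan)
  // Expression-based redistribution: partitioning expressions (e.g. from DISTRIBUTE BY).
  val byExpr = RepartitionByExpression(Seq(keyExpr), child = plan)
  (byCount, byExpr)
}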

sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala

Lines changed: 2 additions & 2 deletions
@@ -961,7 +961,7 @@ class DataFrame private[sql](
    * @group rdd
    */
   override def repartition(numPartitions: Int): DataFrame = {
-    CoalescePartitions(numPartitions, shuffle = true, logicalPlan)
+    Repartition(numPartitions, shuffle = true, logicalPlan)
   }
 
   /**
@@ -972,7 +972,7 @@ class DataFrame private[sql](
    * @group rdd
    */
   override def coalesce(numPartitions: Int): DataFrame = {
-    CoalescePartitions(numPartitions, shuffle = false, logicalPlan)
+    Repartition(numPartitions, shuffle = false, logicalPlan)
   }
 
   /**
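
The public DataFrame API is untouched by the rename; only the logical node the two methods construct changes. A minimal usage sketch (the input DataFrame and the partition counts are hypothetical):

import org.apache.spark.sql.DataFrame

// Hypothetical helper around the unchanged public methods.
def redistribute(df: DataFrame): (DataFrame, DataFrame) = {
  val wider    = df.repartition(200) // now plans logical.Repartition(200, shuffle = true, ...)
  val narrower = df.coalesce(4)      // now plans logical.Repartition(4, shuffle = false, ...)
  (wider, narrower)
}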

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala

Lines changed: 3 additions & 3 deletions
@@ -283,8 +283,8 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
       case logical.Distinct(child) =>
         execution.Distinct(partial = false,
           execution.Distinct(partial = true, planLater(child))) :: Nil
-      case logical.CoalescePartitions(numPartitions, shuffle, child) =>
-        execution.CoalescePartitions(numPartitions, shuffle, planLater(child)) :: Nil
+      case logical.Repartition(numPartitions, shuffle, child) =>
+        execution.Repartition(numPartitions, shuffle, planLater(child)) :: Nil
       case logical.SortPartitions(sortExprs, child) =>
         // This sort only sorts tuples within a partition. Its requiredDistribution will be
         // an UnspecifiedDistribution.
@@ -318,7 +318,7 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
           generator, join = join, outer = outer, g.output, planLater(child)) :: Nil
       case logical.OneRowRelation =>
         execution.PhysicalRDD(Nil, singleRowRdd) :: Nil
-      case logical.Repartition(expressions, child) =>
+      case logical.RepartitionByExpression(expressions, child) =>
         execution.Exchange(
           HashPartitioning(expressions, numPartitions), Nil, planLater(child)) :: Nil
       case e @ EvaluatePython(udf, child, _) =>

sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala

Lines changed: 1 addition & 1 deletion
@@ -250,7 +250,7 @@ case class Distinct(partial: Boolean, child: SparkPlan) extends UnaryNode {
  * Return a new RDD that has exactly `numPartitions` partitions.
  */
 @DeveloperApi
-case class CoalescePartitions(numPartitions: Int, shuffle: Boolean, child: SparkPlan)
+case class Repartition(numPartitions: Int, shuffle: Boolean, child: SparkPlan)
   extends UnaryNode {
   override def output: Seq[Attribute] = child.output
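
The physical operator's execute() body is outside this diff, but its (numPartitions, shuffle) pair mirrors RDD.coalesce, which is the natural RDD-level analogue. A sketch only, under that assumption:

import org.apache.spark.rdd.RDD

// RDD-level counterpart of the two parameters carried by the physical Repartition node.
// shuffle = true behaves like RDD.repartition; shuffle = false is a narrow coalesce.
def redistributeRows[T](rows: RDD[T], numPartitions: Int, shuffle: Boolean): RDD[T] =
  rows.coalesce(numPartitions, shuffle)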

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala

Lines changed: 3 additions & 3 deletions
@@ -783,13 +783,13 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
           case (None, Some(perPartitionOrdering), None, None) =>
             Sort(perPartitionOrdering.getChildren.map(nodeToSortOrder), false, withHaving)
           case (None, None, Some(partitionExprs), None) =>
-            Repartition(partitionExprs.getChildren.map(nodeToExpr), withHaving)
+            RepartitionByExpression(partitionExprs.getChildren.map(nodeToExpr), withHaving)
           case (None, Some(perPartitionOrdering), Some(partitionExprs), None) =>
             Sort(perPartitionOrdering.getChildren.map(nodeToSortOrder), false,
-              Repartition(partitionExprs.getChildren.map(nodeToExpr), withHaving))
+              RepartitionByExpression(partitionExprs.getChildren.map(nodeToExpr), withHaving))
           case (None, None, None, Some(clusterExprs)) =>
             Sort(clusterExprs.getChildren.map(nodeToExpr).map(SortOrder(_, Ascending)), false,
-              Repartition(clusterExprs.getChildren.map(nodeToExpr), withHaving))
+              RepartitionByExpression(clusterExprs.getChildren.map(nodeToExpr), withHaving))
           case (None, None, None, None) => withHaving
           case _ => sys.error("Unsupported set of ordering / distribution clauses.")
         }
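
These branches correspond to HiveQL's DISTRIBUTE BY, SORT BY, and CLUSTER BY clauses. For illustration only (the table, column, and HiveContext value are made up), queries of this shape are what now parse into RepartitionByExpression instead of the old expression-based Repartition:

import org.apache.spark.sql.hive.HiveContext

// Hypothetical table `events` with a `user_id` column; `hive` is an existing HiveContext.
def distributeByExamples(hive: HiveContext): Unit = {
  // DISTRIBUTE BY alone -> RepartitionByExpression(user_id, ...)
  hive.sql("SELECT * FROM events DISTRIBUTE BY user_id")
  // DISTRIBUTE BY + SORT BY -> Sort(..., RepartitionByExpression(user_id, ...))
  hive.sql("SELECT * FROM events DISTRIBUTE BY user_id SORT BY user_id")
  // CLUSTER BY -> Sort(ascending user_id, RepartitionByExpression(user_id, ...))
  hive.sql("SELECT * FROM events CLUSTER BY user_id")
}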
