Skip to content

Commit a0ff7c4

Browse files
committed
Adding catalyst rule to fold two consecutive limits
Creating a LimitFolding Batch
1 parent 8d42d03 commit a0ff7c4

File tree

1 file changed

+12
-0
lines changed
  • sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer

1 file changed

+12
-0
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,8 @@ import org.apache.spark.sql.catalyst.types._
2929

3030
object Optimizer extends RuleExecutor[LogicalPlan] {
3131
val batches =
32+
Batch("LimitFolding", FixedPoint(100),
33+
CombineLimits) ::
3234
Batch("ConstantFolding", FixedPoint(100),
3335
NullPropagation,
3436
ConstantFolding,
@@ -362,3 +364,13 @@ object SimplifyCasts extends Rule[LogicalPlan] {
362364
case Cast(e, dataType) if e.dataType == dataType => e
363365
}
364366
}
367+
368+
/**
 * Combines two adjacent [[catalyst.plans.logical.Limit Limit]] operators into one, merging the
 * two limit expressions into a single expression that evaluates to the smaller of the two.
 *
 * Since the inner limit has already been applied, the effective row count is the minimum of the
 * two expressions; the minimum is expressed as `If(LessThan(ne, le), ne, le)` because the limit
 * expressions are only evaluated at execution time.
 */
object CombineLimits extends Rule[LogicalPlan] {
  def apply(plan: LogicalPlan): LogicalPlan = plan transform {
    // NOTE: the outer/inner Limit nodes themselves are not needed on the right-hand side,
    // so no `@`-binders are introduced for them (avoids unused-variable warnings).
    case Limit(le, Limit(ne, grandChild)) =>
      Limit(If(LessThan(ne, le), ne, le), grandChild)
  }
}

0 commit comments

Comments
 (0)