Commit d74dde5

Remove the redundant mkQueryExecution() method.

1 parent: c129b86
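
Why the method is redundant: SQLContext already exposes executePlan, which constructs a QueryExecution from a LogicalPlan in exactly the same way mkQueryExecution did, so every call site can switch to the existing entry point. A minimal, hypothetical Scala sketch of the overlap (names mirror the Spark classes touched by this commit, but this is a simplified model, not the verbatim source):

    trait LogicalPlan

    class SQLContextSketch {
      // In Spark, QueryExecution's lazy vals drive analysis, optimization,
      // and physical planning of `logical`; those phases are elided here.
      abstract class QueryExecution { val logical: LogicalPlan }

      // The surviving entry point: wrap a logical plan in a QueryExecution.
      def executePlan(plan: LogicalPlan): QueryExecution =
        new QueryExecution { val logical = plan }

      // The removed helper did exactly the same thing, hence "redundant":
      //   def mkQueryExecution(plan: LogicalPlan) = new QueryExecution {
      //     val logical = plan
      //   }
    }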

3 files changed: 3 additions & 8 deletions

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 1 addition & 6 deletions
@@ -31,7 +31,7 @@ import org.apache.spark.sql.catalyst.{ScalaReflection, dsl}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.types._
 import org.apache.spark.sql.catalyst.optimizer.Optimizer
-import org.apache.spark.sql.catalyst.plans.logical.{SetCommand, Subquery, LogicalPlan}
+import org.apache.spark.sql.catalyst.plans.logical.{SetCommand, LogicalPlan}
 import org.apache.spark.sql.catalyst.rules.RuleExecutor
 
 import org.apache.spark.sql.columnar.InMemoryColumnarTableScan
@@ -264,11 +264,6 @@ class SQLContext(@transient val sparkContext: SparkContext)
       Batch("Prepare Expressions", Once, new BindReferences[SparkPlan]) :: Nil
   }
 
-  // TODO: or should we make QueryExecution protected[sql]?
-  protected[sql] def mkQueryExecution(plan: LogicalPlan) = new QueryExecution {
-    val logical = plan
-  }
-
   /**
    * The primary workflow for executing relational queries using Spark. Designed to allow easy
    * access to the intermediate phases of query execution for developers.

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala

Lines changed: 1 addition & 1 deletion
@@ -239,7 +239,7 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
       case logical.SetCommand(key, value) =>
         Seq(execution.SetCommandPhysical(key, value, plan.output)(context))
       case logical.ExplainCommand(child) =>
-        val qe = context.mkQueryExecution(child)
+        val qe = context.executePlan(child)
         Seq(execution.ExplainCommandPhysical(qe.executedPlan, plan.output)(context))
       case _ => Nil
     }
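
The strategy change is mechanical: the ExplainCommand case still builds a QueryExecution for the child plan and hands its executedPlan to ExplainCommandPhysical; only the helper used to build it changes. A hedged sketch of the call site after this commit (assumes code inside the org.apache.spark.sql package, since executePlan is protected[sql]; `context` and `child` come from the surrounding strategy as in the diff above):

    // Hypothetical excerpt mirroring the diff, not a standalone program:
    val qe = context.executePlan(child)   // was: context.mkQueryExecution(child)
    val physicalPlan = qe.executedPlan    // fully prepared physical plan
    // physicalPlan is what ExplainCommandPhysical ultimately renders.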

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 1 addition & 1 deletion
@@ -315,7 +315,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
      */
     def stringResult(): Seq[String] = analyzed match {
       case NativeCommand(cmd) => runSqlHive(cmd)
-      case ExplainCommand(plan) => mkQueryExecution(plan).toString.split("\n")
+      case ExplainCommand(plan) => self.executePlan(plan).toString.split("\n")
       case query =>
         val result: Seq[Seq[Any]] = toRdd.collect().toSeq
         // We need the types so we can output struct field names
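
On the Hive side, EXPLAIN results are produced by printing the QueryExecution and splitting the dump into lines, one result row per line; again only the construction of the QueryExecution changes. A small hedged sketch of the rendering step in isolation (explainToRows is a hypothetical helper, not part of Spark):

    // toString on a QueryExecution yields a multi-line dump of the plan
    // phases (exact formatting varies by Spark version).
    def explainToRows(planDump: String): Seq[String] =
      planDump.split("\n").toSeq   // one EXPLAIN output row per printed line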
