@@ -35,6 +35,7 @@ import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, ReturnAnswer}
 import org.apache.spark.sql.catalyst.plans.physical.{Distribution, UnspecifiedDistribution}
 import org.apache.spark.sql.catalyst.rules.{PlanChangeLogger, Rule}
 import org.apache.spark.sql.catalyst.trees.TreeNodeTag
+import org.apache.spark.sql.catalyst.util.sideBySide
 import org.apache.spark.sql.errors.QueryExecutionErrors
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec._
@@ -306,7 +307,8 @@ case class AdaptiveSparkPlanExec(
         val newCost = costEvaluator.evaluateCost(newPhysicalPlan)
         if (newCost < origCost ||
             (newCost == origCost && currentPhysicalPlan != newPhysicalPlan)) {
-          logOnLevel(s"Plan changed from $currentPhysicalPlan to $newPhysicalPlan")
+          logOnLevel("Plan changed:\n" +
+            sideBySide(currentPhysicalPlan.treeString, newPhysicalPlan.treeString).mkString("\n"))
           cleanUpTempTags(newPhysicalPlan)
           currentPhysicalPlan = newPhysicalPlan
           currentLogicalPlan = newLogicalPlan
@@ -335,7 +337,7 @@ case class AdaptiveSparkPlanExec(
     if (!isSubquery && currentPhysicalPlan.exists(_.subqueries.nonEmpty)) {
       getExecutionId.foreach(onUpdatePlan(_, Seq.empty))
     }
-    logOnLevel(s"Final plan: $currentPhysicalPlan")
+    logOnLevel(s"Final plan:\n$currentPhysicalPlan")
   }
 
   override def executeCollect(): Array[InternalRow] = {
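
For reference, a minimal sketch of the message the new `logOnLevel` call builds, assuming `spark-catalyst` is on the classpath so that `org.apache.spark.sql.catalyst.util.sideBySide` is available; the two plan strings below are hypothetical stand-ins for `treeString` output, not plans produced by a real query:

```scala
import org.apache.spark.sql.catalyst.util.sideBySide

object SideBySidePlanLogSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical stand-ins for currentPhysicalPlan.treeString / newPhysicalPlan.treeString.
    val oldPlan =
      """HashAggregate(keys=[k], functions=[sum(v)])
        |+- Exchange hashpartitioning(k, 200)
        |   +- LocalTableScan [k, v]""".stripMargin
    val newPlan =
      """HashAggregate(keys=[k], functions=[sum(v)])
        |+- AQEShuffleRead coalesced
        |   +- LocalTableScan [k, v]""".stripMargin

    // sideBySide pairs the two tree strings line by line and flags lines that
    // differ, so the old and new plans can be compared in a single log entry
    // instead of two concatenated one-line plan dumps.
    val msg = "Plan changed:\n" +
      sideBySide(oldPlan, newPlan).mkString("\n")
    println(msg)
  }
}
```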