@@ -28,13 +28,12 @@ import org.scalatest.Matchers._
2828
29   29   import org.apache.spark.SparkException
30   30   import org.apache.spark.sql.catalyst.TableIdentifier
31      - import org.apache.spark.sql.catalyst.plans.logical.{Filter, LocalRelation, OneRowRelation, Union}
     31 + import org.apache.spark.sql.catalyst.plans.logical.{Filter, OneRowRelation, Union}
32   32   import org.apache.spark.sql.execution.{FilterExec, QueryExecution, WholeStageCodegenExec}
33   33   import org.apache.spark.sql.execution.aggregate.HashAggregateExec
34   34   import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, ReusedExchangeExec, ShuffleExchangeExec}
35   35   import org.apache.spark.sql.functions._
36   36   import org.apache.spark.sql.internal.SQLConf
37      - import org.apache.spark.sql.internal.SQLConf.OPTIMIZER_METADATA_ONLY
38   37   import org.apache.spark.sql.test.{ExamplePoint, ExamplePointUDT, SharedSQLContext}
39   38   import org.apache.spark.sql.test.SQLTestData.TestData2
40   39   import org.apache.spark.sql.types._
@@ -2253,25 +2252,6 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
2253 2252     assert(mean.collect().toSet === Set(Row("0.0345678900000000000000000000000000000")))
2254 2253   }
22552254
2256      -   test("Incorrect result caused by the rule OptimizeMetadataOnlyQuery") {
2257      -     withSQLConf(OPTIMIZER_METADATA_ONLY.key -> "true") {
2258      -       withTempPath { path =>
2259      -         val tablePath = new File(s"${path.getCanonicalPath}/cOl3=c/cOl1=a/cOl5=e")
2260      -         Seq(("a", "b", "c", "d", "e")).toDF("cOl1", "cOl2", "cOl3", "cOl4", "cOl5")
2261      -           .write.json(tablePath.getCanonicalPath)
2262      -
2263      -         val df = spark.read.json(path.getCanonicalPath).select("CoL1", "CoL5", "CoL3").distinct()
2264      -         checkAnswer(df, Row("a", "e", "c"))
2265      -
2266      -         val localRelation = df.queryExecution.optimizedPlan.collectFirst {
2267      -           case l: LocalRelation => l
2268      -         }
2269      -         assert(localRelation.nonEmpty, "expect to see a LocalRelation")
2270      -         assert(localRelation.get.output.map(_.name) == Seq("cOl3", "cOl1", "cOl5"))
2271      -       }
2272      -     }
2273      -   }
2274      -
2275 2255   test("SPARK-22520: support code generation for large CaseWhen") {
2276 2256     val N = 30
2277 2257     var expr1 = when($"id" === lit(0), 0)
0 commit comments