@@ -19,6 +19,8 @@ package org.apache.spark.sql.hive.execution

import java.io._

import scala.util.control.NonFatal

import org.scalatest.{BeforeAndAfterAll, GivenWhenThen}

import org.apache.spark.{Logging, SparkFunSuite}
@@ -386,11 +388,45 @@ abstract class HiveComparisonTest
  hiveCacheFiles.foreach(_.delete())
}

// If this query is reading other tables that were created during this test run,
// also print out the query plans and results for those.
val computedTablesMessages: String = try {
  val tablesRead = new TestHive.QueryExecution(query).executedPlan.collect {
    case ts: HiveTableScan => ts.relation.tableName
  }.toSet

  // Re-run every query in this test case against a fresh catalog and capture
  // the inserts that (re)built the tables read above.
  TestHive.reset()
  val executions = queryList.map(new TestHive.QueryExecution(_))
  executions.foreach(_.toRdd)
  val tablesGenerated = queryList.zip(executions).flatMap {
    case (q, e) => e.executedPlan.collect {
      case i: InsertIntoHiveTable if tablesRead contains i.table.tableName =>
        (q, e, i)
    }
  }

  tablesGenerated.map { case (hiveql, execution, insert) =>
    s"""
       |=== Generated Table ===
       |$hiveql
       |$execution
       |== Results ==
       |${insert.child.execute().collect().mkString("\n")}
     """.stripMargin
  }.mkString("\n")

} catch {
  case NonFatal(e) =>
    logError("Failed to compute generated tables", e)
    s"Couldn't compute dependent tables: $e"
}

val errorMessage =
  s"""
     |Results do not match for $testCaseName:
     |$hiveQuery\n${hiveQuery.analyzed.output.map(_.name).mkString("\t")}
     |$resultComparison
     |$computedTablesMessages
   """.stripMargin

stringToFile(new File(wrongDirectory, testCaseName), errorMessage + consoleTestCase)
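
A note on the try/NonFatal pattern introduced in the hunk above: the dependent-table dump is purely diagnostic, so it is computed best-effort, and any non-fatal error collapses to a short placeholder string instead of masking the original comparison failure. Below is a minimal, self-contained Scala sketch of that pattern; the BestEffortDiagnostics object and its bestEffort/logError names are illustrative only and not part of the patch.

import scala.util.control.NonFatal

object BestEffortDiagnostics {
  // Illustrative stand-in for the suite's logError (provided there by the Logging trait).
  private def logError(msg: String, e: Throwable): Unit =
    System.err.println(s"$msg: $e")

  // Evaluate an expensive diagnostic block, but never let it throw:
  // non-fatal errors are logged and reduced to a short placeholder string,
  // mirroring how computedTablesMessages is built in the hunk above.
  def bestEffort(compute: => String): String =
    try compute catch {
      case NonFatal(e) =>
        logError("Failed to compute generated tables", e)
        s"Couldn't compute dependent tables: $e"
    }

  def main(args: Array[String]): Unit = {
    println(bestEffort("diagnostics built fine"))
    println(bestEffort(sys.error("catalog unavailable")))
  }
}

Catching with NonFatal rather than a bare Throwable lets fatal errors such as OutOfMemoryError keep propagating, which is why the patch adds the scala.util.control.NonFatal import at the top of the file.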