17 | 17 |
18 | 18 | package org.apache.spark.sql.hive.execution |
19 | 19 |
| 20 | +import scala.util.Try |
| 21 | + |
20 | 22 | import org.apache.spark.sql.hive.test.TestHive |
21 | 23 | import org.apache.spark.sql.hive.test.TestHive._ |
22 | | -import org.apache.spark.sql.{execution, Row} |
| 24 | +import org.apache.spark.sql.{SchemaRDD, execution, Row} |
23 | 25 |
24 | 26 | /** |
25 | 27 | * A set of test cases expressed in Hive QL that are not covered by the tests included in the Hive distribution.
@@ -162,21 +164,60 @@ class HiveQuerySuite extends HiveComparisonTest { |
162 | 164 | hql("SELECT * FROM src").toString |
163 | 165 | } |
164 | 166 |
| 167 | + private val explainCommandClassName = |
| 168 | + classOf[execution.ExplainCommand].getSimpleName.stripSuffix("$") |
| 169 | + |
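| | + // Returns true if the given SchemaRDD contains exactly one row whose plan string starts with the ExplainCommand class name.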
| 170 | + def isExplanation(result: SchemaRDD) = { |
| 171 | + val explanation = result.select('plan).collect().map { case Row(plan: String) => plan } |
| 172 | + explanation.size == 1 && explanation.head.startsWith(explainCommandClassName) |
| 173 | + } |
| 174 | + |
165 | 175 | test("SPARK-1704: Explain commands as a SchemaRDD") { |
166 | 176 | hql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING)") |
167 | 177 |
168 | 178 | val rdd = hql("explain select key, count(value) from src group by key") |
169 | | - val explanation = rdd.select('plan).collect().map { |
170 | | - case Row(plan: String) => plan |
| 179 | + assert(isExplanation(rdd)) |
| 180 | + |
| 181 | + TestHive.reset() |
| 182 | + } |
| 183 | + |
| 184 | + test("Query Hive native command execution result") { |
| 185 | + val tableName = "test_native_commands" |
| 186 | + |
| 187 | + val q0 = hql(s"DROP TABLE IF EXISTS $tableName") |
| 188 | + assert(q0.count() == 0) |
| 189 | + |
| 190 | + val q1 = hql(s"CREATE TABLE $tableName(key INT, value STRING)") |
| 191 | + assert(q1.count() == 0) |
| 192 | + |
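| | + // Native command results are exposed as a SchemaRDD with a single string column named "result".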
| 193 | + val q2 = hql("SHOW TABLES") |
| 194 | + val tables = q2.select('result).collect().map { case Row(table: String) => table } |
| 195 | + assert(tables.contains(tableName)) |
| 196 | + |
| 197 | + val q3 = hql(s"DESCRIBE $tableName") |
| 198 | + assertResult(Array(Array("key", "int", "None"), Array("value", "string", "None"))) { |
| 199 | + q3.select('result).collect().map { case Row(fieldDesc: String) => |
| 200 | + fieldDesc.split("\t").map(_.trim) |
| 201 | + } |
171 | 202 | } |
172 | | - assert(explanation.size == 1) |
173 | 203 |
174 | | - val explainCommandClassName = classOf[execution.ExplainCommand].getSimpleName.stripSuffix("$") |
175 | | - assert(explanation.head.contains(explainCommandClassName)) |
| 204 | + val q4 = hql(s"EXPLAIN SELECT key, COUNT(*) FROM $tableName GROUP BY key") |
| 205 | + assert(isExplanation(q4)) |
176 | 206 |
177 | 207 | TestHive.reset() |
178 | 208 | } |
179 | 209 |
| 210 | + test("Exactly once semantics for DDL and command statements") { |
| 211 | + val tableName = "test_exactly_once" |
| 212 | + val q0 = hql(s"CREATE TABLE $tableName(key INT, value STRING)") |
| 213 | + |
| 214 | + // If the table was not created, the following assertion would fail |
| 215 | + assert(Try(table(tableName)).isSuccess) |
| 216 | + |
| 217 | + // If the CREATE TABLE command got executed again, the following assertion would fail |
| 218 | + assert(Try(q0.count()).isSuccess) |
| 219 | + } |
| 220 | + |
180 | 221 | test("parse HQL set commands") { |
181 | 222 | // Adapted from its SQL counterpart. |
182 | 223 | val testKey = "spark.sql.key.usedfortestonly" |