Skip to content

Commit 0ecea46

Browse files
Changes for HiveQl and HiveContext.
1 parent ce22d80 commit 0ecea46

File tree

3 files changed

+33
-15
lines changed

3 files changed

+33
-15
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,9 @@ case class NativeCommand(cmd: String) extends Command
113113
/**
114114
* Commands of the form "set key=value".
115115
*/
116-
case class SetCommand(key: String, value: String) extends Command
116+
case class SetCommand(key: String, value: String) extends Command {
117+
override def toString = s"<command> set $key=$value"
118+
}
117119

118120
/**
119121
* Returned by a parser when the user only wants to see what query plan would be executed, without

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 28 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -30,16 +30,20 @@ import org.apache.hadoop.hive.ql.Driver
3030
import org.apache.hadoop.hive.ql.processors._
3131
import org.apache.hadoop.hive.ql.session.SessionState
3232

33-
import org.apache.spark.annotation.DeveloperApi
3433
import org.apache.spark.SparkContext
3534
import org.apache.spark.rdd.RDD
3635
import org.apache.spark.sql.catalyst.analysis.{Analyzer, OverrideCatalog}
3736
import org.apache.spark.sql.catalyst.expressions.GenericRow
38-
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, LowerCaseSchema}
39-
import org.apache.spark.sql.catalyst.plans.logical.{NativeCommand, ExplainCommand}
37+
import org.apache.spark.sql.catalyst.plans.logical._
4038
import org.apache.spark.sql.catalyst.ScalaReflection
4139
import org.apache.spark.sql.catalyst.types._
4240
import org.apache.spark.sql.execution._
41+
import org.apache.spark.sql.catalyst.types.StructType
42+
import org.apache.spark.sql.catalyst.plans.logical.NativeCommand
43+
import org.apache.spark.sql.catalyst.plans.logical.ExplainCommand
44+
import org.apache.spark.sql.catalyst.types.ArrayType
45+
import org.apache.spark.sql.catalyst.plans.logical.LowerCaseSchema
46+
import org.apache.spark.sql.catalyst.types.MapType
4347

4448
/* Implicit conversions */
4549
import scala.collection.JavaConversions._
@@ -168,7 +172,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
168172
/**
169173
* Execute the command using Hive and return the results as a sequence. Each element
170174
* in the sequence is one row.
171-
*/
175+
*/
172176
protected def runHive(cmd: String, maxRows: Int = 1000): Seq[String] = {
173177
try {
174178
val cmd_trimmed: String = cmd.trim()
@@ -240,20 +244,30 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
240244
override lazy val optimizedPlan =
241245
optimizer(catalog.PreInsertionCasts(catalog.CreateTables(analyzed)))
242246

243-
override lazy val toRdd: RDD[Row] =
247+
override lazy val toRdd: RDD[Row] = {
248+
249+
def processCmd(cmd: String): RDD[Row] = {
250+
val output = runSqlHive(cmd)
251+
if (output.size == 0) {
252+
emptyResult
253+
} else {
254+
val asRows = output.map(r => new GenericRow(r.split("\t").asInstanceOf[Array[Any]]))
255+
sparkContext.parallelize(asRows, 1)
256+
}
257+
}
258+
244259
analyzed match {
260+
case SetCommand(key, value) =>
261+
logger.debug("inside Hive's toRdd -- matched SetCommand")
262+
// Record the set command inside SQLConf, as well as have Hive execute it.
263+
sqlConf.set(key, value)
264+
processCmd(s"set $key=$value")
245265
case NativeCommand(cmd) =>
246-
val output = runSqlHive(cmd)
247-
248-
if (output.size == 0) {
249-
emptyResult
250-
} else {
251-
val asRows = output.map(r => new GenericRow(r.split("\t").asInstanceOf[Array[Any]]))
252-
sparkContext.parallelize(asRows, 1)
253-
}
266+
processCmd(cmd)
254267
case _ =>
255268
executedPlan.execute().map(_.copy())
256269
}
270+
}
257271

258272
protected val primitiveTypes =
259273
Seq(StringType, IntegerType, LongType, DoubleType, FloatType, BooleanType, ByteType,
@@ -312,6 +326,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
312326
override def simpleString: String =
313327
logical match {
314328
case _: NativeCommand => "<Executed by Hive>"
329+
case _: SetCommand => "<Set Command: Executed by Hive, and noted by SQLContext>"
315330
case _ => executedPlan.toString
316331
}
317332
}

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -208,7 +208,8 @@ private[hive] object HiveQl {
208208
def parseSql(sql: String): LogicalPlan = {
209209
try {
210210
if (sql.toLowerCase.startsWith("set")) {
211-
NativeCommand(sql)
211+
val kvPair = sql.drop(3).split("=")
212+
SetCommand(kvPair(0).trim, kvPair(1).trim)
212213
} else if (sql.toLowerCase.startsWith("add jar")) {
213214
AddJar(sql.drop(8))
214215
} else if (sql.toLowerCase.startsWith("add file")) {

0 commit comments

Comments
 (0)