@@ -43,7 +43,7 @@ import org.apache.spark.util.Utils
private[hive] object SparkSQLCLIDriver {
private var prompt = "spark-sql"
private var continuedPrompt = "".padTo(prompt.length, ' ')
-private var transport:TSocket = _
+private var transport: TSocket = _

installSignalHandler()

@@ -276,13 +276,13 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {

driver.init()
val out = sessionState.out
-val start:Long = System.currentTimeMillis()
+val start: Long = System.currentTimeMillis()
if (sessionState.getIsVerbose) {
out.println(cmd)
}
val rc = driver.run(cmd)
val end = System.currentTimeMillis()
-val timeTaken:Double = (end - start) / 1000.0
+val timeTaken: Double = (end - start) / 1000.0

ret = rc.getResponseCode
if (ret != 0) {
@@ -310,7 +310,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
res.clear()
}
} catch {
-case e:IOException =>
+case e: IOException =>
console.printError(
s"""Failed with exception ${e.getClass.getName}: ${e.getMessage}
|${org.apache.hadoop.util.StringUtils.stringifyException(e)}
@@ -77,15 +77,15 @@ private[ui] class ThriftServerPage(parent: ThriftServerTab) extends WebUIPage(""
[{id}]
</a>
}
-val detail = if(info.state == ExecutionState.FAILED) info.detail else info.executePlan
+val detail = if (info.state == ExecutionState.FAILED) info.detail else info.executePlan
<tr>
<td>{info.userName}</td>
<td>
{jobLink}
</td>
<td>{info.groupId}</td>
<td>{formatDate(info.startTimestamp)}</td>
-<td>{if(info.finishTimestamp > 0) formatDate(info.finishTimestamp)}</td>
+<td>{if (info.finishTimestamp > 0) formatDate(info.finishTimestamp)}</td>
<td>{formatDurationOption(Some(info.totalTime))}</td>
<td>{info.statement}</td>
<td>{info.state}</td>
@@ -150,7 +150,7 @@ private[ui] class ThriftServerPage(parent: ThriftServerTab) extends WebUIPage(""
<td> {session.ip} </td>
<td> <a href={sessionLink}> {session.sessionId} </a> </td>
<td> {formatDate(session.startTimestamp)} </td>
-<td> {if(session.finishTimestamp > 0) formatDate(session.finishTimestamp)} </td>
+<td> {if (session.finishTimestamp > 0) formatDate(session.finishTimestamp)} </td>
<td> {formatDurationOption(Some(session.totalTime))} </td>
<td> {session.totalExecution.toString} </td>
</tr>
@@ -87,7 +87,7 @@ private[ui] class ThriftServerSessionPage(parent: ThriftServerTab)
[{id}]
</a>
}
-val detail = if(info.state == ExecutionState.FAILED) info.detail else info.executePlan
+val detail = if (info.state == ExecutionState.FAILED) info.detail else info.executePlan
<tr>
<td>{info.userName}</td>
<td>
@@ -73,7 +73,7 @@ class UISeleniumSuite
}

ignore("thrift server ui test") {
-withJdbcStatement(statement =>{
+withJdbcStatement { statement =>
val baseURL = s"http://localhost:$uiPort"

val queries = Seq(
@@ -97,6 +97,6 @@ class UISeleniumSuite
findAll(cssSelector("""ul table tbody tr td""")).map(_.text).toList should contain (line)
}
}
-})
+}
}
}
@@ -29,10 +29,10 @@ import org.apache.spark.sql.hive.execution.{AddJar, AddFile, HiveNativeCommand}
private[hive] class ExtendedHiveQlParser extends AbstractSparkSQLParser {
// Keyword is a convention with AbstractSparkSQLParser, which will scan all of the `Keyword`
// properties via reflection the class in runtime for constructing the SqlLexical object
-protected val ADD  = Keyword("ADD")
-protected val DFS  = Keyword("DFS")
+protected val ADD = Keyword("ADD")
+protected val DFS = Keyword("DFS")
protected val FILE = Keyword("FILE")
-protected val JAR  = Keyword("JAR")
+protected val JAR = Keyword("JAR")

protected lazy val start: Parser[LogicalPlan] = dfs | addJar | addFile | hiveQl

@@ -527,7 +527,7 @@ private[hive] object HiveContext {
val propMap: HashMap[String, String] = HashMap()
// We have to mask all properties in hive-site.xml that relates to metastore data source
// as we used a local metastore here.
-HiveConf.ConfVars.values().foreach { confvar  =>
+HiveConf.ConfVars.values().foreach { confvar =>
if (confvar.varname.contains("datanucleus") || confvar.varname.contains("jdo")) {
propMap.put(confvar.varname, confvar.defaultVal)
}
@@ -550,7 +550,7 @@
}.mkString("{", ",", "}")
case (seq: Seq[_], ArrayType(typ, _)) =>
seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]")
-case (map: Map[_,_], MapType(kType, vType, _)) =>
+case (map: Map[_, _], MapType(kType, vType, _)) =>
map.map {
case (key, value) =>
toHiveStructString((key, kType)) + ":" + toHiveStructString((value, vType))
@@ -335,7 +335,7 @@ private[hive] trait HiveInspectors {
val allRefs = si.getAllStructFieldRefs
new GenericRow(
allRefs.map(r =>
-unwrap(si.getStructFieldData(data,r), r.getFieldObjectInspector)).toArray)
+unwrap(si.getStructFieldData(data, r), r.getFieldObjectInspector)).toArray)
}


@@ -561,8 +561,8 @@ private[hive] trait HiveInspectors {
case DecimalType() => PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector
case StructType(fields) =>
ObjectInspectorFactory.getStandardStructObjectInspector(
-java.util.Arrays.asList(fields.map(f => f.name) :_*),
-java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)) :_*))
+java.util.Arrays.asList(fields.map(f => f.name) : _*),
+java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)) : _*))
}

/**
@@ -677,8 +677,8 @@ private[hive] trait HiveInspectors {
getListTypeInfo(elemType.toTypeInfo)
case StructType(fields) =>
getStructTypeInfo(
-java.util.Arrays.asList(fields.map(_.name) :_*),
-java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo) :_*))
+java.util.Arrays.asList(fields.map(_.name) : _*),
+java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo) : _*))
case MapType(keyType, valueType, _) =>
getMapTypeInfo(keyType.toTypeInfo, valueType.toTypeInfo)
case BinaryType => binaryTypeInfo
@@ -546,13 +546,17 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
* UNIMPLEMENTED: It needs to be decided how we will persist in-memory tables to the metastore.
* For now, if this functionality is desired mix in the in-memory [[OverrideCatalog]].
*/
-override def registerTable(tableIdentifier: Seq[String], plan: LogicalPlan): Unit = ???
+override def registerTable(tableIdentifier: Seq[String], plan: LogicalPlan): Unit = {
+  throw new UnsupportedOperationException
+}

/**
* UNIMPLEMENTED: It needs to be decided how we will persist in-memory tables to the metastore.
* For now, if this functionality is desired mix in the in-memory [[OverrideCatalog]].
*/
-override def unregisterTable(tableIdentifier: Seq[String]): Unit = ???
+override def unregisterTable(tableIdentifier: Seq[String]): Unit = {
+  throw new UnsupportedOperationException
+}

override def unregisterAllTables(): Unit = {}
}
@@ -725,7 +729,7 @@ private[hive] case class MetastoreRelation
val output = attributes ++ partitionKeys

/** An attribute map that can be used to lookup original attributes based on expression id. */
-val attributeMap = AttributeMap(output.map(o => (o,o)))
+val attributeMap = AttributeMap(output.map(o => (o, o)))

/** An attribute map for determining the ordinal for non-partition columns. */
val columnOrdinals = AttributeMap(attributes.zipWithIndex)
@@ -665,7 +665,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
HiveColumn(field.getName, field.getType, field.getComment)
})
}
case Token("TOK_TABLEROWFORMAT", Token("TOK_SERDEPROPS", child :: Nil) :: Nil)=>
case Token("TOK_TABLEROWFORMAT", Token("TOK_SERDEPROPS", child :: Nil) :: Nil) =>
val serdeParams = new java.util.HashMap[String, String]()
child match {
case Token("TOK_TABLEROWFORMATFIELD", rowChild1 :: rowChild2) =>
@@ -775,7 +775,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C

// Support "TRUNCATE TABLE table_name [PARTITION partition_spec]"
case Token("TOK_TRUNCATETABLE",
Token("TOK_TABLE_PARTITION",table)::Nil) => NativePlaceholder
Token("TOK_TABLE_PARTITION", table) :: Nil) => NativePlaceholder

case Token("TOK_QUERY", queryArgs)
if Seq("TOK_FROM", "TOK_INSERT").contains(queryArgs.head.getText) =>
@@ -1151,7 +1151,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
case Seq(false, false) => Inner
}.toBuffer

-val joinedTables = tables.reduceLeft(Join(_,_, Inner, None))
+val joinedTables = tables.reduceLeft(Join(_, _, Inner, None))

// Must be transform down.
val joinedResult = joinedTables transform {
@@ -1171,7 +1171,8 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
// worth the number of hacks that will be required to implement it. Namely, we need to add
// some sort of mapped star expansion that would expand all child output row to be similarly
// named output expressions where some aggregate expression has been applied (i.e. First).
-??? // Aggregate(groups, Star(None, First(_)) :: Nil, joinedResult)
+// Aggregate(groups, Star(None, First(_)) :: Nil, joinedResult)
+throw new UnsupportedOperationException

case Token(allJoinTokens(joinToken),
relation1 ::
@@ -194,10 +194,9 @@ case class InsertIntoHiveTable(
if (partition.nonEmpty) {

// loadPartition call orders directories created on the iteration order of the this map
-val orderedPartitionSpec = new util.LinkedHashMap[String,String]()
-table.hiveQlTable.getPartCols().foreach{
-  entry=>
-  orderedPartitionSpec.put(entry.getName,partitionSpec.get(entry.getName).getOrElse(""))
+val orderedPartitionSpec = new util.LinkedHashMap[String, String]()
+table.hiveQlTable.getPartCols().foreach { entry =>
+  orderedPartitionSpec.put(entry.getName, partitionSpec.get(entry.getName).getOrElse(""))
}
val partVals = MetaStoreUtils.getPvals(table.hiveQlTable.getPartCols, partitionSpec)

@@ -216,7 +216,7 @@ case class HiveScriptIOSchema (
val columnTypes = attrs.map {
case aref: AttributeReference => aref.dataType
case e: NamedExpression => e.dataType
-case _ =>  null
+case _ => null
}

(columns, columnTypes)
@@ -315,7 +315,7 @@ private[hive] case class HiveWindowFunction(

// The object inspector of values returned from the Hive window function.
@transient
-protected lazy val returnInspector  = {
+protected lazy val returnInspector = {
evaluator.init(GenericUDAFEvaluator.Mode.COMPLETE, inputInspectors)
}

@@ -410,7 +410,7 @@ private[hive] case class HiveGenericUdaf(
protected lazy val resolver: AbstractGenericUDAFResolver = funcWrapper.createFunction()

@transient
-protected lazy val objectInspector  = {
+protected lazy val objectInspector = {
val parameterInfo = new SimpleGenericUDAFParameterInfo(inspectors.toArray, false, false)
resolver.getEvaluator(parameterInfo)
.init(GenericUDAFEvaluator.Mode.COMPLETE, inspectors.toArray)
@@ -443,7 +443,7 @@ private[hive] case class HiveUdaf(
new GenericUDAFBridge(funcWrapper.createFunction())

@transient
-protected lazy val objectInspector  = {
+protected lazy val objectInspector = {
val parameterInfo = new SimpleGenericUDAFParameterInfo(inspectors.toArray, false, false)
resolver.getEvaluator(parameterInfo)
.init(GenericUDAFEvaluator.Mode.COMPLETE, inspectors.toArray)
@@ -71,7 +71,7 @@ private[hive] class SparkHiveWriterContainer(
@transient protected lazy val jobContext = newJobContext(conf.value, jID.value)
@transient private lazy val taskContext = newTaskAttemptContext(conf.value, taID.value)
@transient private lazy val outputFormat =
-conf.value.getOutputFormat.asInstanceOf[HiveOutputFormat[AnyRef,Writable]]
+conf.value.getOutputFormat.asInstanceOf[HiveOutputFormat[AnyRef, Writable]]

def driverSideSetup() {
setIDs(0, 0, 0)
@@ -189,7 +189,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
}
}

-case class TestTable(name: String, commands: (()=>Unit)*)
+case class TestTable(name: String, commands: (() => Unit)*)

protected[hive] implicit class SqlCmd(sql: String) {
def cmd: () => Unit = {
@@ -253,8 +253,8 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
| 'serialization.format'='${classOf[TBinaryProtocol].getName}'
|)
|STORED AS
-|INPUTFORMAT '${classOf[SequenceFileInputFormat[_,_]].getName}'
-|OUTPUTFORMAT '${classOf[SequenceFileOutputFormat[_,_]].getName}'
+|INPUTFORMAT '${classOf[SequenceFileInputFormat[_, _]].getName}'
+|OUTPUTFORMAT '${classOf[SequenceFileOutputFormat[_, _]].getName}'
""".stripMargin)

runSqlHive(