Skip to content

Commit 090beea

Browse files
committed
Revert changes related to SPARK-2678, decided to move them to another PR
1 parent 21c6cf4 commit 090beea

File tree

9 files changed

+67
-54
lines changed

9 files changed

+67
-54
lines changed

bin/beeline

Lines changed: 22 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,5 +20,26 @@
2020
# Figure out where Spark is installed
2121
FWDIR="$(cd `dirname $0`/..; pwd)"
2222

23+
# Find the java binary
24+
if [ -n "${JAVA_HOME}" ]; then
25+
RUNNER="${JAVA_HOME}/bin/java"
26+
else
27+
if [ `command -v java` ]; then
28+
RUNNER="java"
29+
else
30+
echo "JAVA_HOME is not set" >&2
31+
exit 1
32+
fi
33+
fi
34+
35+
# Compute classpath using external script
36+
classpath_output=$($FWDIR/bin/compute-classpath.sh)
37+
if [[ "$?" != "0" ]]; then
38+
echo "$classpath_output"
39+
exit 1
40+
else
41+
CLASSPATH=$classpath_output
42+
fi
43+
2344
CLASS="org.apache.hive.beeline.BeeLine"
24-
exec "$FWDIR"/bin/spark-submit --class $CLASS spark-internal "$@"
45+
exec "$RUNNER" -cp "$CLASSPATH" $CLASS "$@"

bin/spark-sql

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,9 @@
2020
#
2121
# Shell script for starting the Spark SQL CLI
2222

23+
# Enter posix mode for bash
24+
set -o posix
25+
2326
# Figure out where Spark is installed
2427
FWDIR="$(cd `dirname $0`/..; pwd)"
2528

@@ -30,4 +33,4 @@ if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
3033
fi
3134

3235
CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
33-
exec "$FWDIR"/bin/spark-submit --class $CLASS $@ spark-internal
36+
exec "$FWDIR"/bin/spark-submit --class $CLASS spark-internal $@

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 25 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -204,7 +204,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
204204

205205
/** Fill in values by parsing user options. */
206206
private def parseOpts(opts: Seq[String]): Unit = {
207-
val EQ_SEPARATED_OPT = """(--[^=]+)=(.+)""".r
207+
var inSparkOpts = true
208208

209209
// Delineates parsing of Spark options from parsing of user options.
210210
parse(opts)
@@ -307,21 +307,33 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
307307
verbose = true
308308
parse(tail)
309309

310-
case EQ_SEPARATED_OPT(opt, value) :: tail =>
311-
// convert --foo=bar to --foo bar
312-
parse(opt :: value :: tail)
313-
314-
case value :: tail if value.startsWith("-") =>
315-
SparkSubmit.printErrorAndExit(s"Unrecognized option '$value'.")
316-
317310
case value :: tail =>
318-
primaryResource = if (!SparkSubmit.isShell(value) && !SparkSubmit.isInternal(value)) {
319-
Utils.resolveURI(value).toString
311+
if (inSparkOpts) {
312+
value match {
313+
// convert --foo=bar to --foo bar
314+
case v if v.startsWith("--") && v.contains("=") && v.split("=").size == 2 =>
315+
val parts = v.split("=")
316+
parse(Seq(parts(0), parts(1)) ++ tail)
317+
case v if v.startsWith("-") =>
318+
val errMessage = s"Unrecognized option '$value'."
319+
SparkSubmit.printErrorAndExit(errMessage)
320+
case v =>
321+
primaryResource =
322+
if (!SparkSubmit.isShell(v) && !SparkSubmit.isInternal(v)) {
323+
Utils.resolveURI(v).toString
324+
} else {
325+
v
326+
}
327+
inSparkOpts = false
328+
isPython = SparkSubmit.isPython(v)
329+
parse(tail)
330+
}
320331
} else {
321-
value
332+
if (!value.isEmpty) {
333+
childArgs += value
334+
}
335+
parse(tail)
322336
}
323-
isPython = SparkSubmit.isPython(value)
324-
childArgs ++= tail.filter(_.nonEmpty)
325337

326338
case Nil =>
327339
}

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -101,10 +101,9 @@ class SparkSubmitSuite extends FunSuite with Matchers {
101101
"--class", "Foo",
102102
"userjar.jar",
103103
"some",
104-
"--class", "Bar",
105104
"--weird", "args")
106105
val appArgs = new SparkSubmitArguments(clArgs)
107-
appArgs.childArgs should be (Seq("some", "--class", "Bar", "--weird", "args"))
106+
appArgs.childArgs should be (Seq("some", "--weird", "args"))
108107
}
109108

110109
test("handles YARN cluster mode") {

sbin/start-thriftserver.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,4 +33,4 @@ if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
3333
fi
3434

3535
CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
36-
exec "$FWDIR"/bin/spark-submit --class $CLASS $@ spark-internal
36+
exec "$FWDIR"/bin/spark-submit --class $CLASS spark-internal $@

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,11 +17,14 @@
1717

1818
package org.apache.spark.sql.hive.thriftserver
1919

20+
import scala.collection.JavaConversions._
21+
2022
import org.apache.commons.logging.LogFactory
2123
import org.apache.hadoop.hive.conf.HiveConf
2224
import org.apache.hadoop.hive.ql.session.SessionState
2325
import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService
2426
import org.apache.hive.service.server.{HiveServer2, ServerOptionsProcessor}
27+
2528
import org.apache.spark.sql.Logging
2629
import org.apache.spark.sql.hive.HiveContext
2730
import org.apache.spark.sql.hive.thriftserver.ReflectionUtils._
@@ -45,6 +48,9 @@ private[hive] object HiveThriftServer2 extends Logging {
4548

4649
// Set all properties specified via command line.
4750
val hiveConf: HiveConf = ss.getConf
51+
hiveConf.getAllProperties.toSeq.sortBy(_._1).foreach { case (k, v) =>
52+
logger.debug(s"HiveConf var: $k=$v")
53+
}
4854

4955
SessionState.start(ss)
5056

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/package.scala

Lines changed: 0 additions & 25 deletions
This file was deleted.

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,9 +30,9 @@ class CliSuite extends FunSuite with BeforeAndAfterAll with TestUtils {
3030

3131
override def beforeAll() {
3232
val pb = new ProcessBuilder(
33-
"../../bin/spark-class",
34-
"-Dspark.master=local",
35-
SparkSQLCLIDriver.getClass.getCanonicalName.stripSuffix("$"),
33+
"../../bin/spark-sql",
34+
"--master",
35+
"local",
3636
"--hiveconf",
3737
s"javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=$METASTORE_PATH;create=true",
3838
"--hiveconf",

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala

Lines changed: 5 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,6 @@ import org.scalatest.{BeforeAndAfterAll, FunSuite}
2828

2929
import org.apache.spark.sql.Logging
3030
import org.apache.spark.sql.catalyst.util.getTempFilePath
31-
import org.apache.spark.sql.hive.test.TestHive
3231

3332
/**
3433
* Test for the HiveThriftServer2 using JDBC.
@@ -58,16 +57,14 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with TestUt
5857
// hard to clean up Hive resources entirely, so we just start a new process and kill
5958
// that process for cleanup.
6059
val defaultArgs = Seq(
61-
"../../bin/spark-class",
62-
"-Dspark.master=local",
63-
HiveThriftServer2.getClass.getCanonicalName.stripSuffix("$"),
60+
"../../sbin/start-thriftserver.sh",
61+
"--master local",
6462
"--hiveconf",
6563
"hive.root.logger=INFO,console",
6664
"--hiveconf",
67-
"\"javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=" + METASTORE_PATH +
68-
";create=true\"",
65+
s"javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=$METASTORE_PATH;create=true",
6966
"--hiveconf",
70-
"\"hive.metastore.warehouse.dir=" + WAREHOUSE_PATH + "\"")
67+
s"hive.metastore.warehouse.dir=$WAREHOUSE_PATH")
7168
val pb = new ProcessBuilder(defaultArgs ++ args)
7269
process = pb.start()
7370
inputReader = new BufferedReader(new InputStreamReader(process.getInputStream))
@@ -121,7 +118,7 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with TestUt
121118

122119
def getConnection: Connection = {
123120
val connectURI = s"jdbc:hive2://localhost:$PORT/"
124-
DriverManager.getConnection(connectURI, "", "")
121+
DriverManager.getConnection(connectURI, System.getProperty("user.name"), "")
125122
}
126123

127124
def createStatement(): Statement = getConnection.createStatement()

0 commit comments

Comments (0)