Skip to content

Commit 8045d7a

Browse files
committed
Make sure test suites pass
1 parent 8493a9e commit 8045d7a

File tree

2 files changed

+23
-19
lines changed

2 files changed

+23
-19
lines changed

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -20,22 +20,23 @@ package org.apache.spark.sql.hive.thriftserver
2020

2121
import java.io.{BufferedReader, InputStreamReader, PrintWriter}
2222

23+
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
2324
import org.scalatest.{BeforeAndAfterAll, FunSuite}
2425

2526
class CliSuite extends FunSuite with BeforeAndAfterAll with TestUtils {
2627
val WAREHOUSE_PATH = TestUtils.getWarehousePath("cli")
2728
val METASTORE_PATH = TestUtils.getMetastorePath("cli")
2829

2930
override def beforeAll() {
30-
val pb = new ProcessBuilder(
31-
"../../bin/spark-sql",
32-
"--master",
33-
"local",
34-
"--hiveconf",
35-
s"javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=$METASTORE_PATH;create=true",
36-
"--hiveconf",
37-
"hive.metastore.warehouse.dir=" + WAREHOUSE_PATH)
38-
31+
val jdbcUrl = s"jdbc:derby:;databaseName=$METASTORE_PATH;create=true"
32+
val commands =
33+
s"""../../bin/spark-sql
34+
| --master local
35+
| --hiveconf ${ConfVars.METASTORECONNECTURLKEY}="$jdbcUrl"
36+
| --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$WAREHOUSE_PATH
37+
""".stripMargin.split("\\s+")
38+
39+
val pb = new ProcessBuilder(commands: _*)
3940
process = pb.start()
4041
outputWriter = new PrintWriter(process.getOutputStream, true)
4142
inputReader = new BufferedReader(new InputStreamReader(process.getInputStream))

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala

Lines changed: 13 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ import java.io.{BufferedReader, InputStreamReader}
2525
import java.net.ServerSocket
2626
import java.sql.{Connection, DriverManager, Statement}
2727

28+
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
2829
import org.scalatest.{BeforeAndAfterAll, FunSuite}
2930

3031
import org.apache.spark.Logging
@@ -63,16 +64,18 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with TestUt
6364
// Forking a new process to start the Hive Thrift server. The reason to do this is it is
6465
// hard to clean up Hive resources entirely, so we just start a new process and kill
6566
// that process for cleanup.
66-
val defaultArgs = Seq(
67-
"../../sbin/start-thriftserver.sh",
68-
"--master local",
69-
"--hiveconf",
70-
"hive.root.logger=INFO,console",
71-
"--hiveconf",
72-
s"javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=$METASTORE_PATH;create=true",
73-
"--hiveconf",
74-
s"hive.metastore.warehouse.dir=$WAREHOUSE_PATH")
75-
val pb = new ProcessBuilder(defaultArgs ++ args)
67+
val jdbcUrl = s"jdbc:derby:;databaseName=$METASTORE_PATH;create=true"
68+
val command =
69+
s"""../../sbin/start-thriftserver.sh
70+
| --master local
71+
| --hiveconf hive.root.logger=INFO,console
72+
| --hiveconf ${ConfVars.METASTORECONNECTURLKEY}="$jdbcUrl"
73+
| --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$WAREHOUSE_PATH
74+
| --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST}=$HOST
75+
| --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_PORT}=$PORT
76+
""".stripMargin.split("\\s+")
77+
78+
val pb = new ProcessBuilder(command ++ args: _*)
7679
val environment = pb.environment()
7780
environment.put("HIVE_SERVER2_THRIFT_PORT", PORT.toString)
7881
environment.put("HIVE_SERVER2_THRIFT_BIND_HOST", HOST)

0 commit comments

Comments
 (0)