Skip to content

Commit c0f2fa0

Browse files
committed
extends HiveThriftJdbcTest to start/stop thriftserver for UI test
1 parent: aa20408 · commit: c0f2fa0

File tree

2 files changed

+37
-68
lines changed

2 files changed

+37
-68
lines changed

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -409,24 +409,24 @@ abstract class HiveThriftServer2Test extends FunSuite with BeforeAndAfterAll wit
409409
private val CLASS_NAME = HiveThriftServer2.getClass.getCanonicalName.stripSuffix("$")
410410
private val LOG_FILE_MARK = s"starting $CLASS_NAME, logging to "
411411

412-
private val startScript = "../../sbin/start-thriftserver.sh".split("/").mkString(File.separator)
413-
private val stopScript = "../../sbin/stop-thriftserver.sh".split("/").mkString(File.separator)
412+
protected val startScript = "../../sbin/start-thriftserver.sh".split("/").mkString(File.separator)
413+
protected val stopScript = "../../sbin/stop-thriftserver.sh".split("/").mkString(File.separator)
414414

415415
private var listeningPort: Int = _
416416
protected def serverPort: Int = listeningPort
417417

418418
protected def user = System.getProperty("user.name")
419419

420-
private var warehousePath: File = _
421-
private var metastorePath: File = _
422-
private def metastoreJdbcUri = s"jdbc:derby:;databaseName=$metastorePath;create=true"
420+
protected var warehousePath: File = _
421+
protected var metastorePath: File = _
422+
protected def metastoreJdbcUri = s"jdbc:derby:;databaseName=$metastorePath;create=true"
423423

424424
private val pidDir: File = Utils.createTempDir("thriftserver-pid")
425425
private var logPath: File = _
426426
private var logTailingProcess: Process = _
427427
private var diagnosisBuffer: ArrayBuffer[String] = ArrayBuffer.empty[String]
428428

429-
private def serverStartCommand(port: Int) = {
429+
protected def serverStartCommand(port: Int) = {
430430
val portConf = if (mode == ServerMode.binary) {
431431
ConfVars.HIVE_SERVER2_THRIFT_PORT
432432
} else {

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala

Lines changed: 31 additions & 62 deletions
Original file line numberDiff line numberDiff line change
@@ -17,110 +17,79 @@
1717

1818
package org.apache.spark.sql.hive.thriftserver
1919

20-
import java.sql.{DriverManager, Statement}
2120

22-
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
23-
import org.apache.hive.jdbc.HiveDriver
24-
import org.apache.spark.sql.hive.HiveContext
25-
import org.apache.spark.sql.hive.thriftserver.HiveThriftServer2.HiveThriftServer2Listener
26-
import org.apache.spark.sql.hive.thriftserver.ui.ThriftServerTab
27-
import org.apache.spark.{SparkConf, SparkContext}
21+
22+
import scala.util.Random
23+
2824
import org.openqa.selenium.WebDriver
2925
import org.openqa.selenium.htmlunit.HtmlUnitDriver
30-
import org.scalatest.{Matchers, BeforeAndAfterAll, FunSuite}
26+
import org.scalatest.{Matchers, BeforeAndAfterAll}
3127
import org.scalatest.concurrent.Eventually._
3228
import org.scalatest.selenium.WebBrowser
3329
import org.scalatest.time.SpanSugar._
3430

31+
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
32+
import org.apache.spark.sql.hive.HiveContext
3533

36-
import scala.io.Source
37-
import scala.util.Random
3834

39-
/**
40-
* Created by tianyi on 4/27/15.
41-
*/
42-
class UISeleniumSuite extends FunSuite with WebBrowser with Matchers with BeforeAndAfterAll {
35+
class UISeleniumSuite extends HiveThriftJdbcTest with WebBrowser with Matchers with BeforeAndAfterAll {
4336

4437
implicit var webDriver: WebDriver = _
4538
var server: HiveThriftServer2 = _
4639
var hc: HiveContext = _
4740
val uiPort = 20000 + Random.nextInt(10000)
48-
val listenPort = 10000 + Random.nextInt(10000)
41+
override def mode: ServerMode.Value = ServerMode.binary
4942

5043
override def beforeAll(): Unit = {
5144
webDriver = new HtmlUnitDriver
52-
startThriftServer
45+
super.beforeAll()
5346
}
5447

5548
override def afterAll(): Unit = {
5649
if (webDriver != null) {
5750
webDriver.quit()
5851
}
59-
if (server != null) {
60-
stopThriftServer
61-
}
52+
super.afterAll()
6253
}
6354

64-
def withMultipleConnectionJdbcStatement(fs: (Statement => Unit)*) {
65-
val user = System.getProperty("user.name")
66-
Class.forName(classOf[HiveDriver].getCanonicalName)
67-
val connections = fs.map {
68-
_ => DriverManager.getConnection(s"jdbc:hive2://localhost:$listenPort/", user, "")
55+
override protected def serverStartCommand(port: Int) = {
56+
val portConf = if (mode == ServerMode.binary) {
57+
ConfVars.HIVE_SERVER2_THRIFT_PORT
58+
} else {
59+
ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT
6960
}
70-
val statements = connections.map(_.createStatement())
7161

72-
try {
73-
statements.zip(fs).map { case (s, f) => f(s) }
74-
} finally {
75-
statements.map(_.close())
76-
connections.map(_.close())
77-
}
78-
}
79-
80-
def withJdbcStatement(f: Statement => Unit) {
81-
withMultipleConnectionJdbcStatement(f)
82-
}
83-
84-
/**
85-
* Create a test SparkContext with the SparkUI enabled.
86-
* It is safe to `get` the SparkUI directly from the SparkContext returned here.
87-
*/
88-
private def startThriftServer: Unit = {
89-
val conf = new SparkConf()
90-
.setMaster("local")
91-
.setAppName("test")
92-
.set("spark.ui.enabled", "true")
93-
.set("spark.ui.port", uiPort.toString)
94-
hc = new HiveContext(new SparkContext(conf))
95-
hc.hiveconf.set(ConfVars.HIVE_SERVER2_THRIFT_PORT.toString, listenPort.toString)
96-
server = new HiveThriftServer2(hc)
97-
server.init(hc.hiveconf)
98-
server.start()
99-
HiveThriftServer2.listener = new HiveThriftServer2Listener(server, hc.conf)
100-
hc.sparkContext.addSparkListener(HiveThriftServer2.listener)
101-
HiveThriftServer2.uiTab = Some(new ThriftServerTab(hc.sparkContext))
102-
}
103-
104-
private def stopThriftServer: Unit = {
105-
server.stop()
62+
s"""$startScript
63+
| --master local
64+
| --hiveconf hive.root.logger=INFO,console
65+
| --hiveconf ${ConfVars.METASTORECONNECTURLKEY}=$metastoreJdbcUri
66+
| --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$warehousePath
67+
| --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST}=localhost
68+
| --hiveconf ${ConfVars.HIVE_SERVER2_TRANSPORT_MODE}=$mode
69+
| --hiveconf $portConf=$port
70+
| --driver-class-path ${sys.props("java.class.path")}
71+
| --conf spark.ui.enabled=true
72+
| --conf spark.ui.port=$uiPort
73+
""".stripMargin.split("\\s+").toSeq
10674
}
10775

10876
test("thrift server ui test") {
10977
withJdbcStatement(statement =>{
78+
val baseURL = s"http://localhost:${uiPort}"
79+
11080
val queries = Seq(
111-
"DROP TABLE IF EXISTS test_map",
11281
"CREATE TABLE test_map(key INT, value STRING)",
11382
s"LOAD DATA LOCAL INPATH '${TestData.smallKv}' OVERWRITE INTO TABLE test_map")
11483

11584
queries.foreach(statement.execute)
11685

11786
eventually(timeout(10 seconds), interval(50 milliseconds)) {
118-
go to (hc.sparkContext.ui.get.appUIAddress.stripSuffix("/"))
87+
go to (baseURL)
11988
find(cssSelector("""ul li a[href*="ThriftServer"]""")) should not be(None)
12089
}
12190

12291
eventually(timeout(10 seconds), interval(50 milliseconds)) {
123-
go to (hc.sparkContext.ui.get.appUIAddress.stripSuffix("/") + "/ThriftServer")
92+
go to (baseURL + "/ThriftServer")
12493
find(id("sessionstat")) should not be(None)
12594
find(id("sqlstat")) should not be(None)
12695

0 commit comments

Comments (0)