@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars
 import org.apache.hive.jdbc.HiveDriver
 import org.scalatest.{BeforeAndAfterAll, FunSuite}
 
-import org.apache.spark.Logging
+import org.apache.spark.{SparkException, Logging}
 import org.apache.spark.sql.catalyst.util._
 
 /**
@@ -58,7 +58,7 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with Logging
   val jdbcUri = s"jdbc:hive2://$listeningHost:$listeningPort/"
   val user = System.getProperty("user.name")
 
-  override def beforeAll(): Unit = {
+  override def beforeAll() = {
     val timeout: FiniteDuration = 30.seconds
     val serverScript = "../../sbin/start-thriftserver.sh".split("/").mkString(File.separator)
     val command =
@@ -87,8 +87,10 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with Logging
       // logFile may have not finished, try every second
       while (!logFile.exists() || (!fileToString(logFile).contains(
         "ThriftBinaryCLIService listening on") && tryNum < maxTries)) {
+        tryNum = tryNum + 1
         Thread.sleep(1000)
       }
+      println(fileToString(logFile))
       if (fileToString(logFile).contains("ThriftBinaryCLIService listening on")) {
         serverStarted.success(())
       } else {
@@ -101,7 +103,8 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with Logging
 
     Future {
       val exitValue = process.exitValue()
-      logInfo(s"Start Spark SQL Thrift server process exit value: $exitValue")
+      serverStarted.tryFailure(
+        new SparkException(s"Spark SQL Thrift server process exit value: $exitValue"))
     }
 
     try {
@@ -127,6 +130,7 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with Logging
           |End HiveThriftServer2Suite failure output
           |=========================================
         """.stripMargin, cause)
+      throw cause
     } finally {
       process.destroy()
     }
@@ -138,26 +142,24 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with Logging
     stopThriftserver
   }
 
-  def stopThriftserver: Unit = {
+  def stopThriftserver = {
     val stopScript = "../../sbin/stop-thriftserver.sh".split("/").mkString(File.separator)
     val builder = new ProcessBuilder(stopScript)
     val process = builder.start()
     new Thread("read stderr") {
       override def run() {
         for (line <- Source.fromInputStream(process.getErrorStream).getLines()) {
-          System.err.println(line)
+          println(line)
         }
       }
     }.start()
-    val output = new StringBuffer
-    val stdoutThread = new Thread("read stdout") {
+    new Thread("read stdout") {
       override def run() {
         for (line <- Source.fromInputStream(process.getInputStream).getLines()) {
-          output.append(line)
+          println(line)
         }
       }
-    }
-    stdoutThread.start()
+    }.start()
     val exitValue = process.waitFor()
     logInfo(s"Stop Spark SQL Thrift server process exit value: $exitValue")
   }
@@ -180,6 +182,7 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with Logging
       assertResult(5, "Row count mismatch") {
         val resultSet = statement.executeQuery("SELECT COUNT(*) FROM test")
         resultSet.next()
+        // println(s"#######${resultSet.getInt(1)}")
         resultSet.getInt(1)
       }
     } finally {
@@ -208,6 +211,7 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with Logging
 
     (0 until 5).foreach { _ =>
       resultSet.next()
+      // println(s"#######${resultSet.getInt(1)}")
       assert(resultSet.getInt(1) === 0)
       assert(resultSet.wasNull())
     }
0 commit comments