
Commit 8a6bbe0

weiqingy authored and srowen committed
[MINOR][SQL] Use resource path for test_script.sh
## What changes were proposed in this pull request?

This PR modified the test case `test("script")` to use the resource path for `test_script.sh`, making the test case portable (even in IntelliJ).

## How was this patch tested?

Passed the test case.

Before, running `test("script")` in IntelliJ failed with:

```
Caused by: org.apache.spark.SparkException: Subprocess exited with status 127. Error: bash: src/test/resources/test_script.sh: No such file or directory
```

After: the test passes.

Author: Weiqing Yang <[email protected]>

Closes #15246 from weiqingy/hivetest.
1 parent 4201ddc commit 8a6bbe0
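
For context, the heart of the fix is resolving the script through the test classpath instead of a path relative to the JVM's working directory. Below is a minimal, standalone sketch of that lookup; it is illustrative only (the object name is made up), not code from the patch:

```scala
import java.io.File

// Illustrative only: resolve a test resource via the classloader instead of a
// hard-coded relative path such as "src/test/resources/test_script.sh".
// The classloader lookup works regardless of the directory the JVM was
// launched from (sbt, Maven, or an IDE like IntelliJ).
object ResourceLookupSketch {
  def main(args: Array[String]): Unit = {
    val url = getClass.getClassLoader.getResource("test_script.sh")
    require(url != null, "test_script.sh must be on the test classpath")
    val scriptPath = new File(url.getFile).getCanonicalPath
    // This absolute path is what gets interpolated into the Hive REDUCE/TRANSFORM command.
    println(s"bash $scriptPath")
  }
}
```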


6 files changed: +28 −12 lines changed


core/src/test/scala/org/apache/spark/SparkFunSuite.scala

Lines changed: 11 additions & 0 deletions
@@ -18,6 +18,8 @@
 package org.apache.spark
 
 // scalastyle:off
+import java.io.File
+
 import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}
 
 import org.apache.spark.internal.Logging
@@ -41,6 +43,15 @@ abstract class SparkFunSuite
     }
   }
 
+  // helper function
+  protected final def getTestResourceFile(file: String): File = {
+    new File(getClass.getClassLoader.getResource(file).getFile)
+  }
+
+  protected final def getTestResourcePath(file: String): String = {
+    getTestResourceFile(file).getCanonicalPath
+  }
+
   /**
    * Log the suite name and the test name before and after each test.
    *
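
For reference, a hedged sketch of how a suite might use these new helpers once the change is in place; the suite name and resource file below are hypothetical, chosen only to show the calls:

```scala
package org.apache.spark

// Hypothetical suite illustrating the helpers added to SparkFunSuite above.
// "my-test-data.txt" is an assumed file under src/test/resources/.
class ResourcePathExampleSuite extends SparkFunSuite {

  test("resolve a test resource from the classpath") {
    // getTestResourceFile returns a java.io.File resolved via the classloader,
    // so the lookup does not depend on the process working directory.
    val file = getTestResourceFile("my-test-data.txt")
    assert(file.exists())

    // getTestResourcePath returns the canonical path as a String, convenient
    // for SparkConf values or shell commands built inside tests.
    val path = getTestResourcePath("my-test-data.txt")
    assert(path.endsWith("my-test-data.txt"))
  }
}
```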

core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -59,16 +59,16 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
   with JsonTestUtils with Eventually with WebBrowser with LocalSparkContext
   with ResetSystemProperties {
 
-  private val logDir = new File("src/test/resources/spark-events")
-  private val expRoot = new File("src/test/resources/HistoryServerExpectations/")
+  private val logDir = getTestResourcePath("spark-events")
+  private val expRoot = getTestResourceFile("HistoryServerExpectations")
 
   private var provider: FsHistoryProvider = null
   private var server: HistoryServer = null
   private var port: Int = -1
 
   def init(): Unit = {
     val conf = new SparkConf()
-      .set("spark.history.fs.logDirectory", logDir.getAbsolutePath)
+      .set("spark.history.fs.logDirectory", logDir)
       .set("spark.history.fs.update.interval", "0")
       .set("spark.testing", "true")
     provider = new FsHistoryProvider(conf)

core/src/test/scala/org/apache/spark/ui/UISuite.scala

Lines changed: 2 additions & 1 deletion
@@ -53,9 +53,10 @@ class UISuite extends SparkFunSuite {
   }
 
   private def sslEnabledConf(): (SparkConf, SSLOptions) = {
+    val keyStoreFilePath = getTestResourcePath("spark.keystore")
     val conf = new SparkConf()
       .set("spark.ssl.ui.enabled", "true")
-      .set("spark.ssl.ui.keyStore", "./src/test/resources/spark.keystore")
+      .set("spark.ssl.ui.keyStore", keyStoreFilePath)
       .set("spark.ssl.ui.keyStorePassword", "123456")
       .set("spark.ssl.ui.keyPassword", "123456")
     (conf, new SecurityManager(conf).getSSLOptions("ui"))

sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
 
   // Used for generating new query answer files by saving
   private val regenerateGoldenFiles: Boolean = System.getenv("SPARK_GENERATE_GOLDEN_FILES") == "1"
-  private val goldenSQLPath = "src/test/resources/sqlgen/"
+  private val goldenSQLPath = getTestResourcePath("sqlgen")
 
   protected override def beforeAll(): Unit = {
     super.beforeAll()

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala

Lines changed: 2 additions & 1 deletion
@@ -152,7 +152,8 @@ class HiveSparkSubmitSuite
       case v if v.startsWith("2.10") || v.startsWith("2.11") => v.substring(0, 4)
       case x => throw new Exception(s"Unsupported Scala Version: $x")
     }
-    val testJar = s"sql/hive/src/test/resources/regression-test-SPARK-8489/test-$version.jar"
+    val jarDir = getTestResourcePath("regression-test-SPARK-8489")
+    val testJar = s"$jarDir/test-$version.jar"
     val args = Seq(
       "--conf", "spark.ui.enabled=false",
       "--conf", "spark.master.rest.enabled=false",

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala

Lines changed: 9 additions & 6 deletions
@@ -66,13 +66,14 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   import spark.implicits._
 
   test("script") {
+    val scriptFilePath = getTestResourcePath("test_script.sh")
     if (testCommandAvailable("bash") && testCommandAvailable("echo | sed")) {
       val df = Seq(("x1", "y1", "z1"), ("x2", "y2", "z2")).toDF("c1", "c2", "c3")
       df.createOrReplaceTempView("script_table")
       val query1 = sql(
-        """
+        s"""
           |SELECT col1 FROM (from(SELECT c1, c2, c3 FROM script_table) tempt_table
-          |REDUCE c1, c2, c3 USING 'bash src/test/resources/test_script.sh' AS
+          |REDUCE c1, c2, c3 USING 'bash $scriptFilePath' AS
           |(col1 STRING, col2 STRING)) script_test_table""".stripMargin)
       checkAnswer(query1, Row("x1_y1") :: Row("x2_y2") :: Nil)
     }
@@ -1290,11 +1291,12 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       .selectExpr("id AS a", "id AS b")
       .createOrReplaceTempView("test")
 
+    val scriptFilePath = getTestResourcePath("data")
     checkAnswer(
       sql(
-        """FROM(
+        s"""FROM(
           | FROM test SELECT TRANSFORM(a, b)
-          | USING 'python src/test/resources/data/scripts/test_transform.py "\t"'
+          | USING 'python $scriptFilePath/scripts/test_transform.py "\t"'
           | AS (c STRING, d STRING)
           |) t
           |SELECT c
@@ -1308,12 +1310,13 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       .selectExpr("id AS a", "id AS b")
       .createOrReplaceTempView("test")
 
+    val scriptFilePath = getTestResourcePath("data")
    val df = sql(
-      """FROM test
+      s"""FROM test
        |SELECT TRANSFORM(a, b)
        |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
        |WITH SERDEPROPERTIES('field.delim' = '|')
-       |USING 'python src/test/resources/data/scripts/test_transform.py "|"'
+       |USING 'python $scriptFilePath/scripts/test_transform.py "|"'
        |AS (c STRING, d STRING)
        |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
        |WITH SERDEPROPERTIES('field.delim' = '|')
