@@ -57,30 +57,31 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
     for (i <- 0 until 3) {
       val preferredMirror =
         Seq("wget", "https://www.apache.org/dyn/closer.lua?preferred=true", "-q", "-O", "-").!!.trim
-      val url = s"$preferredMirror/spark/spark-$version/spark-$version-bin-hadoop2.7.tgz"
+      val filename = s"spark-$version-bin-hadoop2.7.tgz"
+      val url = s"$preferredMirror/spark/spark-$version/$filename"
       logInfo(s"Downloading Spark $version from $url")
       if (Seq("wget", url, "-q", "-P", path).! == 0) {
-        return
+        val downloaded = new File(sparkTestingDir, filename).getCanonicalPath
+        val targetDir = new File(sparkTestingDir, s"spark-$version").getCanonicalPath
+
+        Seq("mkdir", targetDir).!
+        val exitCode = Seq("tar", "-xzf", downloaded, "-C", targetDir, "--strip-components=1").!
+        Seq("rm", downloaded).!
+
+        // For a corrupted file, `tar` returns non-zero values. However, we also need to check
+        // the extracted file because `tar` returns 0 for empty file.
+        val sparkSubmit = new File(sparkTestingDir, s"spark-$version/bin/spark-submit")
+        if (exitCode == 0 && sparkSubmit.exists()) {
+          return
+        } else {
+          Seq("rm", "-rf", targetDir).!
+        }
       }
       logWarning(s"Failed to download Spark $version from $url")
     }
     fail(s"Unable to download Spark $version")
   }
 
-
-  private def downloadSpark(version: String): Unit = {
-    tryDownloadSpark(version, sparkTestingDir.getCanonicalPath)
-
-    val downloaded = new File(sparkTestingDir, s"spark-$version-bin-hadoop2.7.tgz").getCanonicalPath
-    val targetDir = new File(sparkTestingDir, s"spark-$version").getCanonicalPath
-
-    Seq("mkdir", targetDir).!
-
-    Seq("tar", "-xzf", downloaded, "-C", targetDir, "--strip-components=1").!
-
-    Seq("rm", downloaded).!
-  }
-
   private def genDataDir(name: String): String = {
     new File(tmpDataDir, name).getCanonicalPath
   }
@@ -125,7 +126,7 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
     PROCESS_TABLES.testingVersions.zipWithIndex.foreach { case (version, index) =>
       val sparkHome = new File(sparkTestingDir, s"spark-$version")
       if (!sparkHome.exists()) {
-        downloadSpark(version)
+        tryDownloadSpark(version, sparkTestingDir.getCanonicalPath)
       }
 
       val args = Seq(
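Note on the change above: tryDownloadSpark now validates each download before returning. It extracts the tarball into spark-$version and only accepts the result when `tar` exits 0 and the extracted bin/spark-submit exists; otherwise it removes the target directory and retries with the next mirror. The standalone sketch below illustrates the same download-and-verify pattern; the object and method names (SparkTarballCheck, fetchAndVerify) are illustrative only and are not part of the suite.

// Standalone sketch (hypothetical helper, not the suite's code): download a Spark
// tarball, extract it, and accept it only if `tar` exits 0 AND the expected binary
// is present, since `tar` exits 0 for an empty file.
import java.io.File
import scala.sys.process._

object SparkTarballCheck {
  def fetchAndVerify(version: String, baseDir: File): Boolean = {
    val filename = s"spark-$version-bin-hadoop2.7.tgz"
    // Ask Apache's mirror resolver for a preferred mirror, as the suite does.
    val mirror =
      Seq("wget", "https://www.apache.org/dyn/closer.lua?preferred=true", "-q", "-O", "-").!!.trim
    val url = s"$mirror/spark/spark-$version/$filename"

    if (Seq("wget", url, "-q", "-P", baseDir.getCanonicalPath).! != 0) return false

    val downloaded = new File(baseDir, filename).getCanonicalPath
    val targetDir = new File(baseDir, s"spark-$version")
    targetDir.mkdirs()

    val exitCode =
      Seq("tar", "-xzf", downloaded, "-C", targetDir.getCanonicalPath, "--strip-components=1").!
    Seq("rm", downloaded).!

    // `tar` reports a corrupted archive with a non-zero exit code, but exits 0 for an
    // empty file, so also verify that the extracted spark-submit actually exists.
    val sparkSubmit = new File(targetDir, "bin/spark-submit")
    if (exitCode == 0 && sparkSubmit.exists()) {
      true
    } else {
      // Clean up the partial extraction so a retry starts from scratch.
      Seq("rm", "-rf", targetDir.getCanonicalPath).!
      false
    }
  }
}

Keeping the extraction inside the retry loop also means a truncated or empty tarball from a flaky mirror no longer leaves a broken spark-$version directory behind for later test runs to mistake for a valid installation.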