core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -28,9 +28,12 @@ import org.apache.ivy.plugins.resolver.IBiblioResolver
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
+import org.apache.spark.util.Utils
 
 class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
+  private var tempIvyPath: String = _
+
   private val noOpOutputStream = new OutputStream {
     def write(b: Int) = {}
   }
@@ -47,6 +50,7 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
     super.beforeAll()
     // We don't want to write logs during testing
    SparkSubmitUtils.printStream = new BufferPrintStream
+    tempIvyPath = Utils.createTempDir(namePrefix = "ivy").getAbsolutePath()
   }
 
   test("incorrect maven coordinate throws error") {
@@ -90,21 +94,20 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
   }
 
   test("ivy path works correctly") {
-    val ivyPath = "dummy" + File.separator + "ivy"
     val md = SparkSubmitUtils.getModuleDescriptor
     val artifacts = for (i <- 0 until 3) yield new MDArtifact(md, s"jar-$i", "jar", "jar")
-    var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(ivyPath))
+    var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(tempIvyPath))
     for (i <- 0 until 3) {
-      val index = jPaths.indexOf(ivyPath)
+      val index = jPaths.indexOf(tempIvyPath)
       assert(index >= 0)
-      jPaths = jPaths.substring(index + ivyPath.length)
+      jPaths = jPaths.substring(index + tempIvyPath.length)
     }
     val main = MavenCoordinate("my.awesome.lib", "mylib", "0.1")
     IvyTestUtils.withRepository(main, None, None) { repo =>
       // end to end
       val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, Option(repo),
-        Option(ivyPath), true)
-      assert(jarPath.indexOf(ivyPath) >= 0, "should use non-default ivy path")
+        Option(tempIvyPath), true)
+      assert(jarPath.indexOf(tempIvyPath) >= 0, "should use non-default ivy path")
     }
   }
 
@@ -123,13 +126,12 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
       assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
     }
     // Local ivy repository with modified home
-    val dummyIvyPath = "dummy" + File.separator + "ivy"
-    val dummyIvyLocal = new File(dummyIvyPath, "local" + File.separator)
+    val dummyIvyLocal = new File(tempIvyPath, "local" + File.separator)
     IvyTestUtils.withRepository(main, None, Some(dummyIvyLocal), true) { repo =>
       val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, None,
-        Some(dummyIvyPath), true)
+        Some(tempIvyPath), true)
       assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
-      assert(jarPath.indexOf(dummyIvyPath) >= 0, "should be in new ivy path")
+      assert(jarPath.indexOf(tempIvyPath) >= 0, "should be in new ivy path")
     }
   }
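The net effect of these test changes: the suite no longer writes an Ivy cache to a hardcoded relative path ("dummy/ivy") under the working directory; it allocates a temp directory once in beforeAll and threads it through every test. A minimal sketch of the pattern, assuming the suite lives in Spark's own test tree (SparkFunSuite is package-private) and using Spark's Utils.createTempDir helper, which also registers the directory for deletion on JVM exit; the suite name is hypothetical:

    import org.apache.spark.SparkFunSuite
    import org.apache.spark.util.Utils
    import org.scalatest.BeforeAndAfterAll

    class TempDirPatternSuite extends SparkFunSuite with BeforeAndAfterAll {

      // Allocated once per suite, under java.io.tmpdir; cleaned up on JVM exit.
      private var tempIvyPath: String = _

      override def beforeAll(): Unit = {
        super.beforeAll()
        tempIvyPath = Utils.createTempDir(namePrefix = "ivy").getAbsolutePath()
      }

      test("temp dir exists and is writable") {
        val dir = new java.io.File(tempIvyPath)
        assert(dir.isDirectory && dir.canWrite)
      }
    }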
pom.xml (24 changes: 23 additions & 1 deletion)
@@ -179,7 +179,7 @@
     <parquet.deps.scope>compile</parquet.deps.scope>
 
     <!--
-      Overridable test home. So that you can call individual pom files directory without
+      Overridable test home. So that you can call individual pom files directly without
       things breaking.
     -->
     <spark.test.home>${session.executionRootDirectory}</spark.test.home>
@@ -1256,6 +1256,7 @@
             <systemProperties>
              <derby.system.durability>test</derby.system.durability>
               <java.awt.headless>true</java.awt.headless>
+              <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
               <spark.test.home>${spark.test.home}</spark.test.home>
               <spark.testing>1</spark.testing>
               <spark.ui.enabled>false</spark.ui.enabled>
@@ -1289,6 +1290,7 @@
             <systemProperties>
               <derby.system.durability>test</derby.system.durability>
               <java.awt.headless>true</java.awt.headless>
+              <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
               <spark.test.home>${spark.test.home}</spark.test.home>
               <spark.testing>1</spark.testing>
               <spark.ui.enabled>false</spark.ui.enabled>
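Both forked-test plugin configurations (surefire above, scalatest here) now point java.io.tmpdir at target/tmp, so temp files created through the standard JDK APIs land inside the build directory and vanish on mvn clean. A quick probe to confirm the redirection from inside a forked JVM; a sketch, not code from the PR:

    import java.io.File
    import java.nio.file.Files

    object TmpDirCheck extends App {
      // In a JVM forked with -Djava.io.tmpdir=<module>/target/tmp, both the
      // system property and freshly created temp files point into target/tmp.
      val tmpRoot = new File(sys.props("java.io.tmpdir")).toPath
      val scratch = Files.createTempFile("probe", ".tmp")
      assert(scratch.startsWith(tmpRoot), s"$scratch not under $tmpRoot")
      scratch.toFile.delete()
    }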
@@ -1548,6 +1550,26 @@
             </execution>
           </executions>
         </plugin>
+
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-antrun-plugin</artifactId>
+          <executions>
+            <execution>
+              <id>create-tmp-dir</id>
+              <phase>generate-test-resources</phase>
+              <goals>
+                <goal>run</goal>
+              </goals>
+              <configuration>
+                <target>
+                  <mkdir dir="${project.build.directory}/tmp" />
+                </target>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+
         <!-- Enable surefire and scalatest in all children, in one place: -->
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
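The new antrun execution is needed because the JVM never creates java.io.tmpdir on demand: if the property points at a directory that doesn't exist, temp-file creation fails outright. Running mkdir at generate-test-resources guarantees target/tmp exists before surefire or scalatest forks a test JVM. A sketch of the failure mode this avoids; an illustrative standalone app, not code from the PR:

    import java.nio.file.Files

    // Launch with: scala -Djava.io.tmpdir=/nonexistent/tmp TmpDirProbe.scala
    object TmpDirProbe extends App {
      try {
        val f = Files.createTempFile("probe", ".tmp")
        println(s"created $f")
      } catch {
        case e: java.io.IOException =>
          // This is what tests would hit without the antrun mkdir step.
          println(s"temp-file creation failed: $e")
      }
    }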
project/SparkBuild.scala (6 changes: 6 additions & 0 deletions)
@@ -51,6 +51,11 @@ object BuildCommons {
   // Root project.
   val spark = ProjectRef(buildLocation, "spark")
   val sparkHome = buildLocation
+
+  val testTempDir = s"$sparkHome/target/tmp"
+  if (!new File(testTempDir).isDirectory()) {
+    require(new File(testTempDir).mkdirs())
+  }
 }
 
 object SparkBuild extends PomBuild {
Review comments on the testTempDir block:

Contributor: The problem with doing it this way is that buildLocation is just using the current directory as the build root, which is not always a valid assumption. We should instead remove that val and use projectRoot.value anyplace that we need the root spark directory.

Contributor (author): That should also affect the val spark = ... line above, right? So my particular change shouldn't have made anything worse than before.

Contributor: Yeah, sorry, I should have been clearer. This was a general comment about how we are doing things in the SBT build, not something that should have blocked merging this PR.
@@ -496,6 +501,7 @@ object TestSettings {
       "SPARK_DIST_CLASSPATH" ->
         (fullClasspath in Test).value.files.map(_.getAbsolutePath).mkString(":").stripSuffix(":"),
       "JAVA_HOME" -> sys.env.get("JAVA_HOME").getOrElse(sys.props("java.home"))),
+    javaOptions in Test += s"-Djava.io.tmpdir=$testTempDir",
     javaOptions in Test += "-Dspark.test.home=" + sparkHome,
     javaOptions in Test += "-Dspark.testing=1",
     javaOptions in Test += "-Dspark.port.maxRetries=100",
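On the SBT side, BuildCommons creates target/tmp eagerly when the build definition loads, and TestSettings hands the path to every forked test JVM. For comparison, here is a sketch of the direction the reviewer suggests above: derive the path from a build setting rather than the process working directory. The setting key and wiring are illustrative, not code from the PR; baseDirectory in ThisBuild stands in for the projectRoot setting the reviewer mentions:

    import sbt._
    import sbt.Keys._

    object TempDirSettings {
      // Illustrative key; in the reviewer's suggestion this would be derived
      // from the build's own projectRoot setting instead.
      val testTempDir = settingKey[File]("java.io.tmpdir for forked test JVMs")

      lazy val settings = Seq(
        testTempDir := (baseDirectory in ThisBuild).value / "target" / "tmp",
        javaOptions in Test += s"-Djava.io.tmpdir=${testTempDir.value.getAbsolutePath}",
        // Create the directory inside a task, so it happens at test time
        // rather than whenever the build definition is loaded.
        test in Test := (test in Test).dependsOn(Def.task {
          IO.createDirectory(testTempDir.value)
        }).value
      )
    }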