34 commits
4e96c01
Add YARN/Stable compiled classes to the CLASSPATH.
berngp Apr 15, 2014
1342886
The `spark-class` shell now ignores non jar files in the assembly dir…
berngp Apr 15, 2014
ddf2547
The `spark-shell` option `--log-conf` also enables the SPARK_PRINT_LA…
berngp Apr 15, 2014
2204539
Root is now Spark and qualify the assembly if it was built with YARN.
berngp Apr 15, 2014
889bf4e
Upgrade the Maven Build to YARN 2.3.0.
berngp Apr 16, 2014
460510a
merge https://github.com/berngp/spark/commits/feature/small-shell-cha…
witgo Apr 29, 2014
f1c7535
Improved build configuration Ⅱ
witgo Apr 29, 2014
8540e83
review commit
witgo Apr 30, 2014
c4c6e45
review commit
witgo Apr 30, 2014
9f08e80
Merge branch 'master' of https://github.com/apache/spark into improve…
witgo May 1, 2014
e1a7e00
improve travis tests coverage
witgo May 1, 2014
effe79c
missing ","
witgo May 1, 2014
9ea1af9
add the dependency of commons-lang
witgo May 1, 2014
0ed124d
SPARK-1693: Most of the tests throw a java.lang.SecurityException whe…
witgo May 1, 2014
03b136f
revert .travis.yml
witgo May 1, 2014
d3488c6
Add the missing yarn dependencies
witgo May 1, 2014
779ae5d
Fix SPARK-1693: Dependent on multiple versions of servlet-api jars le…
witgo May 1, 2014
27bd426
review commit
witgo May 1, 2014
54a86b0
review commit
witgo May 2, 2014
882e35d
review commit
witgo May 2, 2014
31451df
Compile hive optional
witgo May 3, 2014
5fb961f
revert exclusion org.eclipse.jetty.orbit:javax.servlet
witgo May 3, 2014
ea53549
Merge branch 'master' of https://github.com/apache/spark into improve…
witgo May 3, 2014
a5ff7d1
revert exclusion org.eclipse.jetty.orbit:javax.servlet
witgo May 3, 2014
17f6e7d
merge master
witgo May 4, 2014
3218d3b
merge master
witgo May 5, 2014
e788690
merge master
witgo May 7, 2014
8b0c63f
Merge branch 'master' of https://github.com/apache/spark into improve…
witgo May 12, 2014
427d499
merge master
witgo May 12, 2014
f1eb268
Merge branch 'master' of https://github.com/apache/spark into improve…
witgo May 12, 2014
4cc0c90
revert profile hive
witgo May 12, 2014
4277fed
review commit
witgo May 12, 2014
31c6409
review commit
witgo May 12, 2014
7d8cabf
Merge branch 'master' of https://github.com/apache/spark into improve…
witgo May 14, 2014
4 changes: 2 additions & 2 deletions bin/spark-class
@@ -110,8 +110,8 @@ export JAVA_OPTS

if [ ! -f "$FWDIR/RELEASE" ]; then
# Exit if the user hasn't compiled Spark
num_jars=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*.jar" | wc -l)
jars_list=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*.jar")
num_jars=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep -E "spark-assembly.*hadoop.*.jar$" | wc -l)
jars_list=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep -E "spark-assembly.*hadoop.*.jar$")
if [ "$num_jars" -eq "0" ]; then
echo "Failed to find Spark assembly in $FWDIR/assembly/target/scala-$SCALA_VERSION/" >&2
echo "You need to build Spark before running this program." >&2
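A quick sketch of why the `-E`/`$` anchoring above matters (file names made up for illustration): the old pattern counts anything whose name merely contains ".jar", while the anchored pattern only counts the real assembly jars.

# Sketch with made-up file names in an otherwise empty directory.
mkdir -p /tmp/assembly-demo && cd /tmp/assembly-demo
touch spark-assembly-1.0.0-SNAPSHOT-hadoop2.3.0.jar \
      spark-assembly-1.0.0-SNAPSHOT-hadoop2.3.0.jar.sha \
      spark-assembly-1.0.0-SNAPSHOT-hadoop2.3.0.jar.orig

ls | grep    "spark-assembly.*hadoop.*.jar"  | wc -l   # old pattern:      3
ls | grep -E "spark-assembly.*hadoop.*.jar$" | wc -l   # anchored pattern: 1
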
29 changes: 0 additions & 29 deletions core/pom.xml
@@ -258,35 +258,6 @@
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<phase>test</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<exportAntProperties>true</exportAntProperties>
<target>
<property name="spark.classpath" refid="maven.test.classpath" />
<property environment="env" />
<fail message="Please set the SCALA_HOME (or SCALA_LIBRARY_PATH if scala is on the path) environment variables and retry.">
<condition>
<not>
<or>
<isset property="env.SCALA_HOME" />
<isset property="env.SCALA_LIBRARY_PATH" />
</or>
</not>
</condition>
</fail>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
5 changes: 4 additions & 1 deletion pom.xml
@@ -789,6 +789,10 @@
<filereports>${project.build.directory}/SparkTestSuite.txt</filereports>
<argLine>-Xmx3g -XX:MaxPermSize=${MaxPermGen} -XX:ReservedCodeCacheSize=512m</argLine>
<stderr/>
<environmentVariables>
<SPARK_HOME>${session.executionRootDirectory}</SPARK_HOME>
<SPARK_TESTING>1</SPARK_TESTING>
</environmentVariables>
</configuration>
<executions>
<execution>
@@ -1029,6 +1033,5 @@
</dependency>
</dependencies>
</profile>

</profiles>
</project>
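With the environmentVariables block now in the parent pom, every module's scalatest-maven-plugin run inherits SPARK_HOME and SPARK_TESTING, which is what lets the per-module copies in repl/pom.xml and yarn/pom.xml below go away. ${session.executionRootDirectory} resolves to the directory Maven was invoked from, so roughly (checkout path hypothetical):

# Rough sketch, hypothetical checkout path: the forked test JVM of any module
# now sees the same two variables, supplied by the parent pom.
cd /path/to/spark
mvn -pl core test
#   SPARK_HOME    = /path/to/spark    (from ${session.executionRootDirectory})
#   SPARK_TESTING = 1
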
22 changes: 11 additions & 11 deletions project/SparkBuild.scala
@@ -55,7 +55,7 @@ object SparkBuild extends Build {
val SCALAC_JVM_VERSION = "jvm-1.6"
val JAVAC_JVM_VERSION = "1.6"

lazy val root = Project("root", file("."), settings = rootSettings) aggregate(allProjects: _*)
lazy val root = Project("spark", file("."), settings = rootSettings) aggregate(allProjects: _*)
Contributor
Just wondering - what is the benefit of this change?

Contributor Author
Just to increase readability.

lazy val core = Project("core", file("core"), settings = coreSettings)

Expand Down Expand Up @@ -266,16 +266,16 @@ object SparkBuild extends Build {
*/

libraryDependencies ++= Seq(
"io.netty" % "netty-all" % "4.0.17.Final",
"org.eclipse.jetty" % "jetty-server" % jettyVersion,
"org.eclipse.jetty" % "jetty-util" % jettyVersion,
"org.eclipse.jetty" % "jetty-plus" % jettyVersion,
"org.eclipse.jetty" % "jetty-security" % jettyVersion,
"org.scalatest" %% "scalatest" % "1.9.1" % "test",
"org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
"com.novocode" % "junit-interface" % "0.10" % "test",
"org.easymock" % "easymock" % "3.1" % "test",
"org.mockito" % "mockito-all" % "1.8.5" % "test"
"io.netty" % "netty-all" % "4.0.17.Final",
"org.eclipse.jetty" % "jetty-server" % jettyVersion,
"org.eclipse.jetty" % "jetty-util" % jettyVersion,
"org.eclipse.jetty" % "jetty-plus" % jettyVersion,
"org.eclipse.jetty" % "jetty-security" % jettyVersion,
"org.scalatest" %% "scalatest" % "1.9.1" % "test",
"org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
"com.novocode" % "junit-interface" % "0.10" % "test",
"org.easymock" % "easymock" % "3.1" % "test",
"org.mockito" % "mockito-all" % "1.8.5" % "test"
),

testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
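On the sbt side, the only visible effect of the root -> spark rename discussed above should be the id under which the aggregate project is reported, along the lines of (output abbreviated and assumed, not captured from a real run):

# Illustration only: list the project ids after the rename.
sbt projects
# [info] In file:/path/to/spark/
# [info]   * spark      (previously reported as "root")
# [info]     core
# [info]     repl
# ...
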
30 changes: 0 additions & 30 deletions repl/pom.xml
@@ -92,42 +92,12 @@
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<phase>test</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<exportAntProperties>true</exportAntProperties>
<target>
<property name="spark.classpath" refid="maven.test.classpath" />
<property environment="env" />
<fail message="Please set the SCALA_HOME (or SCALA_LIBRARY_PATH if scala is on the path) environment variables and retry.">
<condition>
<not>
<or>
<isset property="env.SCALA_HOME" />
<isset property="env.SCALA_LIBRARY_PATH" />
</or>
</not>
</condition>
</fail>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<configuration>
<environmentVariables>
<SPARK_HOME>${basedir}/..</SPARK_HOME>
<SPARK_TESTING>1</SPARK_TESTING>
</environmentVariables>
</configuration>
</plugin>
30 changes: 0 additions & 30 deletions yarn/pom.xml
@@ -117,42 +117,12 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<phase>test</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<exportAntProperties>true</exportAntProperties>
<target>
<property name="spark.classpath" refid="maven.test.classpath" />
<property environment="env" />
<fail message="Please set the SCALA_HOME (or SCALA_LIBRARY_PATH if scala is on the path) environment variables and retry.">
<condition>
<not>
<or>
<isset property="env.SCALA_HOME" />
<isset property="env.SCALA_LIBRARY_PATH" />
</or>
</not>
</condition>
</fail>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<configuration>
<environmentVariables>
<SPARK_HOME>${basedir}/../..</SPARK_HOME>
<SPARK_TESTING>1</SPARK_TESTING>
<SPARK_CLASSPATH>${spark.classpath}</SPARK_CLASSPATH>
</environmentVariables>
</configuration>
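The maven-antrun-plugin block deleted from core/pom.xml, repl/pom.xml, and yarn/pom.xml was the same copy-pasted guard in each module: it exported the test classpath as spark.classpath and failed the build when neither SCALA_HOME nor SCALA_LIBRARY_PATH was set. Roughly, the pre-change behavior was (sketch, error text paraphrased from the <fail> message):

# Sketch of the old guard tripping when neither variable is set.
unset SCALA_HOME SCALA_LIBRARY_PATH
mvn -pl repl test
# [ERROR] Please set the SCALA_HOME (or SCALA_LIBRARY_PATH if scala is on the
#         path) environment variables and retry.
#
# After this change the guard is gone, so neither variable needs to be set for
# the Maven build to run a module's tests.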