Merged
Changes from all commits (31 commits)
b877e20
move yarn to its own directory
jey Jul 17, 2013
f67b94a
remove core/src/hadoop{1,2} dirs
jey Jul 18, 2013
69c3bbf
dynamically detect hadoop version
jey Jul 18, 2013
273b499
yarn sbt
jey Jul 18, 2013
5d0785b
remove hadoop-yarn's org/apache/...
jey Jul 20, 2013
8b1c152
add comment
jey Jul 20, 2013
cb4ef19
yarn support
jey Jul 22, 2013
43ebcb8
rename HadoopMapRedUtil => SparkHadoopMapRedUtil, HadoopMapReduceUtil…
jey Jul 24, 2013
4f43fd7
make SparkHadoopUtil a member of SparkEnv
jey Jul 24, 2013
bd0bab4
SparkEnv isn't available this early, and not needed anyway
jey Jul 24, 2013
e2d7656
re-enable YARN support
jey Jul 24, 2013
8bb0bd1
YARN ApplicationMaster shouldn't wait forever
jey Jul 24, 2013
14b6bcd
update YARN docs
jey Jul 29, 2013
8f979ed
Fix newTaskAttemptID to work under YARN
jey Aug 6, 2013
a06a9d5
Rename HadoopWriter to SparkHadoopWriter since it's outside of our pa…
jey Aug 6, 2013
a0f0848
Update default version of Hadoop to 1.2.1
jey Aug 14, 2013
3f98eff
Allow make-distribution.sh to specify Hadoop version used
jey Aug 15, 2013
8add2d7
Fix repl/assembly when YARN enabled
jey Aug 15, 2013
353fab2
Initial changes to make Maven build agnostic of hadoop version
jey Aug 15, 2013
11b42a8
Maven build now works with CDH hadoop-2.0.0-mr1
jey Aug 15, 2013
9dd15fe
Don't mark hadoop-client as 'provided'
jey Aug 15, 2013
741ecd5
Forgot to remove a few references to ${classifier}
jey Aug 15, 2013
ad580b9
Maven build now also works with YARN
jey Aug 15, 2013
c1e547b
Updates to repl and example POMs to match SBT build
jey Aug 16, 2013
b1d9974
Fix SBT build under Hadoop 0.23.x
jey Aug 16, 2013
67b5936
Rename YARN build flag to SPARK_WITH_YARN
jey Aug 16, 2013
44000b1
Make YARN POM file valid
jey Aug 18, 2013
47a7c43
Don't assume spark-examples JAR always exists
jey Aug 18, 2013
bdd861c
Fix Maven build with Hadoop 0.23.9
jey Aug 19, 2013
23f4622
Remove redundant dependencies from POMs
jey Aug 19, 2013
6f6944c
Update SBT build to use simpler fix for Hadoop 0.23.9
jey Aug 19, 2013
27 changes: 1 addition & 26 deletions assembly/pom.xml
@@ -37,56 +37,31 @@
</plugins>
</build>

<profiles>
<profile>
<id>hadoop1</id>
<properties>
<classifier.name>hadoop1</classifier.name>
</properties>
</profile>
<profile>
<id>hadoop2</id>
<properties>
<classifier.name>hadoop2</classifier.name>
</properties>
</profile>
<profile>
<id>hadoop2-yarn</id>
<properties>
<classifier.name>hadoop2-yarn</classifier.name>
</properties>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>org.spark-project</groupId>
<artifactId>spark-core</artifactId>
<classifier>${classifier.name}</classifier>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.spark-project</groupId>
<artifactId>spark-bagel</artifactId>
<classifier>${classifier.name}</classifier>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.spark-project</groupId>
<artifactId>spark-mllib</artifactId>
<classifier>${classifier.name}</classifier>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.spark-project</groupId>
<artifactId>spark-repl</artifactId>
<classifier>${classifier.name}</classifier>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.spark-project</groupId>
<artifactId>spark-streaming</artifactId>
<classifier>${classifier.name}</classifier>
<version>${project.version}</version>
</dependency>
</dependencies>
</project>
</project>
105 changes: 5 additions & 100 deletions bagel/pom.xml
@@ -32,11 +32,15 @@
<url>http://spark-project.org/</url>

<dependencies>
<dependency>
<groupId>org.spark-project</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</dependency>

<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.version}</artifactId>
@@ -58,103 +62,4 @@
</plugin>
</plugins>
</build>

<profiles>
<profile>
<id>hadoop1</id>
<dependencies>
<dependency>
<groupId>org.spark-project</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
<classifier>hadoop1</classifier>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<classifier>hadoop1</classifier>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>hadoop2</id>
<dependencies>
<dependency>
<groupId>org.spark-project</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
<classifier>hadoop2</classifier>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<classifier>hadoop2</classifier>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>hadoop2-yarn</id>
<dependencies>
<dependency>
<groupId>org.spark-project</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
<classifier>hadoop2-yarn</classifier>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<classifier>hadoop2-yarn</classifier>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
2 changes: 2 additions & 0 deletions bin/compute-classpath.cmd
@@ -34,6 +34,7 @@ set EXAMPLES_DIR=%FWDIR%examples
set BAGEL_DIR=%FWDIR%bagel
set MLLIB_DIR=%FWDIR%mllib
set TOOLS_DIR=%FWDIR%tools
set YARN_DIR=%FWDIR%yarn
set STREAMING_DIR=%FWDIR%streaming
set PYSPARK_DIR=%FWDIR%python

@@ -50,6 +51,7 @@ set CLASSPATH=%CLASSPATH%;%FWDIR%python\lib\*
set CLASSPATH=%CLASSPATH%;%BAGEL_DIR%\target\scala-%SCALA_VERSION%\classes
set CLASSPATH=%CLASSPATH%;%MLLIB_DIR%\target\scala-%SCALA_VERSION%\classes
set CLASSPATH=%CLASSPATH%;%TOOLS_DIR%\target\scala-%SCALA_VERSION%\classes
set CLASSPATH=%CLASSPATH%;%YARN_DIR%\target\scala-%SCALA_VERSION%\classes

rem Add hadoop conf dir - else FileSystem.*, etc fail
rem Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts
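As the comment in the hunk above notes, the classpath scripts expect a Hadoop configuration directory to be exported before Spark starts. A minimal sketch, assuming a typical layout (the path is a placeholder; adjust for the cluster):

# Point Spark at the Hadoop client configuration:
export HADOOP_CONF_DIR=/etc/hadoop/conf
# or, when running against a YARN cluster:
export YARN_CONF_DIR=/etc/hadoop/conf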
9 changes: 6 additions & 3 deletions bin/compute-classpath.sh
@@ -37,6 +37,7 @@ EXAMPLES_DIR="$FWDIR/examples"
BAGEL_DIR="$FWDIR/bagel"
MLLIB_DIR="$FWDIR/mllib"
TOOLS_DIR="$FWDIR/tools"
YARN_DIR="$FWDIR/yarn"
STREAMING_DIR="$FWDIR/streaming"
PYSPARK_DIR="$FWDIR/python"

@@ -62,16 +63,18 @@ function dev_classpath {
CLASSPATH="$CLASSPATH:$REPL_DIR/lib/*"
# Add the shaded JAR for Maven builds
if [ -e $REPL_BIN_DIR/target ]; then
for jar in `find "$REPL_BIN_DIR/target" -name 'spark-repl-*-shaded-hadoop*.jar'`; do
for jar in `find "$REPL_BIN_DIR/target" -name 'spark-repl-*-shaded.jar'`; do
CLASSPATH="$CLASSPATH:$jar"
done
# The shaded JAR doesn't contain examples, so include those separately
EXAMPLES_JAR=`ls "$EXAMPLES_DIR/target/spark-examples"*[0-9T].jar`
CLASSPATH+=":$EXAMPLES_JAR"
for jar in `find "$EXAMPLES_DIR/target" -name 'spark-examples*[0-9T].jar'`; do
CLASSPATH="$CLASSPATH:$jar"
done
fi
CLASSPATH="$CLASSPATH:$BAGEL_DIR/target/scala-$SCALA_VERSION/classes"
CLASSPATH="$CLASSPATH:$MLLIB_DIR/target/scala-$SCALA_VERSION/classes"
CLASSPATH="$CLASSPATH:$TOOLS_DIR/target/scala-$SCALA_VERSION/classes"
CLASSPATH="$CLASSPATH:$YARN_DIR/target/scala-$SCALA_VERSION/classes"
for jar in `find $PYSPARK_DIR/lib -name '*jar'`; do
CLASSPATH="$CLASSPATH:$jar"
done
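One quick way to check that the new yarn/ module lands on the classpath after these edits, assuming the script is run from the Spark root (output is illustrative only):

# Print the computed classpath one entry per line and look for the yarn module:
./bin/compute-classpath.sh | tr ':' '\n' | grep yarn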