@@ -28,18 +28,20 @@ set -o pipefail
 set -e
 
 # Figure out where the Spark framework is installed
-FWDIR="$(cd "`dirname "$0"`"; pwd)"
-DISTDIR="$FWDIR/dist"
+SPARK_HOME="$(cd "`dirname "$0"`"; pwd)"
+DISTDIR="$SPARK_HOME/dist"
 
 SPARK_TACHYON=false
 MAKE_TGZ=false
 NAME=none
+MVN="$SPARK_HOME/build/mvn"
 
 function exit_with_usage {
   echo "make-distribution.sh - tool for making binary distributions of Spark"
   echo ""
   echo "usage:"
-  echo "./make-distribution.sh [--name] [--tgz] [--with-tachyon] <maven build options>"
+  cl_options="[--name] [--tgz] [--mvn <mvn-command>] [--with-tachyon]"
+  echo "./make-distribution.sh $cl_options <maven build options>"
   echo "See Spark's \"Building Spark\" doc for correct Maven options."
   echo ""
   exit 1
@@ -71,6 +73,10 @@ while (( "$#" )); do
     --tgz)
       MAKE_TGZ=true
       ;;
+    --mvn)
+      MVN="$2"
+      shift
+      ;;
     --name)
       NAME="$2"
       shift
@@ -109,17 +115,17 @@ if which git &>/dev/null; then
     unset GITREV
 fi
 
-if ! which mvn &>/dev/null; then
-    echo -e "You need Maven installed to build Spark."
-    echo -e "Download Maven from https://maven.apache.org/"
+if ! which $MVN &>/dev/null; then
+    echo -e "Could not locate Maven command: '$MVN'."
+    echo -e "Specify the Maven command with the --mvn flag"
     exit -1;
 fi
 
 VERSION=$(mvn help:evaluate -Dexpression=project.version 2>/dev/null | grep -v "INFO" | tail -n 1)
 SPARK_HADOOP_VERSION=$(mvn help:evaluate -Dexpression=hadoop.version $@ 2>/dev/null\
     | grep -v "INFO"\
     | tail -n 1)
-SPARK_HIVE=$(mvn help:evaluate -Dexpression=project.activeProfiles -pl sql/hive $@ 2>/dev/null\
+SPARK_HIVE=$($MVN help:evaluate -Dexpression=project.activeProfiles -pl sql/hive $@ 2>/dev/null\
     | grep -v "INFO"\
     | fgrep --count "<id>hive</id>";\
     # Reset exit status to 0, otherwise the script stops here if the last grep finds nothing\
@@ -161,11 +167,11 @@ else
 fi
 
 # Build uber fat JAR
-cd "$FWDIR"
+cd "$SPARK_HOME"
 
 export MAVEN_OPTS="-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m"
 
-BUILD_COMMAND="mvn clean package -DskipTests $@"
+BUILD_COMMAND="$MVN clean package -DskipTests $@"
 
 # Actually build the jar
 echo -e "\nBuilding with..."
@@ -177,41 +183,42 @@ ${BUILD_COMMAND}
 rm -rf "$DISTDIR"
 mkdir -p "$DISTDIR/lib"
 echo "Spark $VERSION$GITREVSTRING built for Hadoop $SPARK_HADOOP_VERSION" > "$DISTDIR/RELEASE"
+echo "Build flags: $@" >> "$DISTDIR/RELEASE"
 
 # Copy jars
-cp "$FWDIR"/assembly/target/scala*/*assembly*hadoop*.jar "$DISTDIR/lib/"
-cp "$FWDIR"/examples/target/scala*/spark-examples*.jar "$DISTDIR/lib/"
+cp "$SPARK_HOME"/assembly/target/scala*/*assembly*hadoop*.jar "$DISTDIR/lib/"
+cp "$SPARK_HOME"/examples/target/scala*/spark-examples*.jar "$DISTDIR/lib/"
 # This will fail if the -Pyarn profile is not provided
 # In this case, silence the error and ignore the return code of this command
-cp "$FWDIR"/network/yarn/target/scala*/spark-*-yarn-shuffle.jar "$DISTDIR/lib/" &> /dev/null || :
+cp "$SPARK_HOME"/network/yarn/target/scala*/spark-*-yarn-shuffle.jar "$DISTDIR/lib/" &> /dev/null || :
 
 # Copy example sources (needed for python and SQL)
 mkdir -p "$DISTDIR/examples/src/main"
-cp -r "$FWDIR"/examples/src/main "$DISTDIR/examples/src/"
+cp -r "$SPARK_HOME"/examples/src/main "$DISTDIR/examples/src/"
 
 if [ "$SPARK_HIVE" == "1" ]; then
-  cp "$FWDIR"/lib_managed/jars/datanucleus*.jar "$DISTDIR/lib/"
+  cp "$SPARK_HOME"/lib_managed/jars/datanucleus*.jar "$DISTDIR/lib/"
 fi
 
 # Copy license and ASF files
-cp "$FWDIR/LICENSE" "$DISTDIR"
-cp "$FWDIR/NOTICE" "$DISTDIR"
+cp "$SPARK_HOME/LICENSE" "$DISTDIR"
+cp "$SPARK_HOME/NOTICE" "$DISTDIR"
 
-if [ -e "$FWDIR"/CHANGES.txt ]; then
-  cp "$FWDIR/CHANGES.txt" "$DISTDIR"
+if [ -e "$SPARK_HOME"/CHANGES.txt ]; then
+  cp "$SPARK_HOME/CHANGES.txt" "$DISTDIR"
 fi
 
 # Copy data files
-cp -r "$FWDIR/data" "$DISTDIR"
+cp -r "$SPARK_HOME/data" "$DISTDIR"
 
 # Copy other things
 mkdir "$DISTDIR"/conf
-cp "$FWDIR"/conf/*.template "$DISTDIR"/conf
-cp "$FWDIR/README.md" "$DISTDIR"
-cp -r "$FWDIR/bin" "$DISTDIR"
-cp -r "$FWDIR/python" "$DISTDIR"
-cp -r "$FWDIR/sbin" "$DISTDIR"
-cp -r "$FWDIR/ec2" "$DISTDIR"
+cp "$SPARK_HOME"/conf/*.template "$DISTDIR"/conf
+cp "$SPARK_HOME/README.md" "$DISTDIR"
+cp -r "$SPARK_HOME/bin" "$DISTDIR"
+cp -r "$SPARK_HOME/python" "$DISTDIR"
+cp -r "$SPARK_HOME/sbin" "$DISTDIR"
+cp -r "$SPARK_HOME/ec2" "$DISTDIR"
 
 # Download and copy in tachyon, if requested
 if [ "$SPARK_TACHYON" == "true" ]; then
@@ -243,9 +250,9 @@
 
 if [ "$MAKE_TGZ" == "true" ]; then
   TARDIR_NAME=spark-$VERSION-bin-$NAME
-  TARDIR="$FWDIR/$TARDIR_NAME"
+  TARDIR="$SPARK_HOME/$TARDIR_NAME"
   rm -rf "$TARDIR"
   cp -r "$DISTDIR" "$TARDIR"
-  tar czf "spark-$VERSION-bin-$NAME.tgz" -C "$FWDIR" "$TARDIR_NAME"
+  tar czf "spark-$VERSION-bin-$NAME.tgz" -C "$SPARK_HOME" "$TARDIR_NAME"
   rm -rf "$TARDIR"
 fi
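
For reference, an illustrative way to invoke the patched script from the Spark source root (the distribution name, Maven path, and build profiles below are placeholders, not taken from this commit):

  # Default: uses the bundled build/mvn wrapper set as MVN above
  ./make-distribution.sh --name hadoop2.4 --tgz -Pyarn -Phadoop-2.4

  # Point --mvn at a locally installed Maven binary instead
  ./make-distribution.sh --name custom --tgz --mvn /usr/local/bin/mvn -Pyarn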