@@ -48,8 +48,8 @@ RUN_IN_FOREGROUND=0
 # Check if --foreground is passed as an argument. It is an optional parameter.
 if [ "$1" == "--foreground" ]
 then
-  shift
-  RUN_IN_FOREGROUND="1"
+  shift
+  RUN_IN_FOREGROUND="1"
 fi
 
 # Check if --config is passed as an argument. It is an optional parameter.
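Because only $1 is inspected before the shift, --foreground has to be the very first argument, ahead of --config and the start/stop/spark-submit option. A hypothetical invocation for a standalone Master (the class name and instance number follow the usual spark-daemon.sh calling convention; the Master options shown are illustrative, not taken from this commit):

    # keep the Master JVM attached to the calling shell or container entrypoint
    sbin/spark-daemon.sh --foreground start org.apache.spark.deploy.master.Master 1 \
      --host "$(hostname)" --port 7077 --webui-port 8080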
@@ -104,18 +104,18 @@
 export SPARK_PRINT_LAUNCH_COMMAND="1"
 
 if [ "$RUN_IN_FOREGROUND" = "0" ]; then
-  # get log directory
-  if [ "$SPARK_LOG_DIR" = "" ]; then
-    export SPARK_LOG_DIR="$SPARK_HOME/logs"
-  fi
-  mkdir -p "$SPARK_LOG_DIR"
-  touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
-  TEST_LOG_DIR=$?
-  if [ "${TEST_LOG_DIR}" = "0" ]; then
-    rm -f "$SPARK_LOG_DIR"/.spark_test
-  else
-    chown "$SPARK_IDENT_STRING" "$SPARK_LOG_DIR"
-  fi
+  # get log directory
+  if [ "$SPARK_LOG_DIR" = "" ]; then
+    export SPARK_LOG_DIR="$SPARK_HOME/logs"
+  fi
+  mkdir -p "$SPARK_LOG_DIR"
+  touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
+  TEST_LOG_DIR=$?
+  if [ "${TEST_LOG_DIR}" = "0" ]; then
+    rm -f "$SPARK_LOG_DIR"/.spark_test
+  else
+    chown "$SPARK_IDENT_STRING" "$SPARK_LOG_DIR"
+  fi
 fi
 
 if [ "$SPARK_PID_DIR" = "" ]; then
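The log-directory probe above runs only on the daemonized path; in foreground mode output stays on stdout, so no log directory is prepared. The probe itself is a generic touch-and-check pattern: try to create a scratch file, and if that fails, chown the directory to the daemon user. A minimal standalone sketch, with a made-up directory and owner rather than the Spark variables:

    LOG_DIR=/tmp/myapp-logs     # illustrative stand-in for SPARK_LOG_DIR
    OWNER="$USER"               # illustrative stand-in for SPARK_IDENT_STRING
    mkdir -p "$LOG_DIR"
    touch "$LOG_DIR"/.write_test > /dev/null 2>&1
    if [ "$?" = "0" ]; then
      rm -f "$LOG_DIR"/.write_test      # writable: remove the scratch file
    else
      chown "$OWNER" "$LOG_DIR"         # not writable: try to hand it to the daemon user
    fi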
@@ -152,35 +152,35 @@ case $option in
     fi
 
     if [ "$RUN_IN_FOREGROUND" = "0" ]; then
-      spark_rotate_log "$log"
-      echo starting $command, logging to $log
-      if [ $option == spark-submit ]; then
-        source "$SPARK_HOME"/bin/utils.sh
-        gatherSparkSubmitOpts "$@"
-        nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-submit --class $command \
-          "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}" >> "$log" 2>&1 < /dev/null &
-      else
-        nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
-      fi
-      newpid=$!
-      echo $newpid > $pid
-      sleep 2
-      # Check if the process has died; in that case we'll tail the log so the user can see
-      if ! kill -0 $newpid > /dev/null 2>&1; then
-        echo "failed to launch $command:"
-        tail -2 "$log" | sed 's/^/  /'
-        echo "full log in $log"
-      fi
-    else
-      echo starting $command, logging to stdout
-      if [ $option == spark-submit ]; then
-        source "$SPARK_HOME"/bin/utils.sh
-        gatherSparkSubmitOpts "$@"
-        nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-submit --class $command \
-          "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}" 2>&1 < /dev/null
-      else
-        nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-class $command "$@" 2>&1 < /dev/null
-      fi
+      spark_rotate_log "$log"
+      echo starting $command, logging to $log
+      if [ $option == spark-submit ]; then
+        source "$SPARK_HOME"/bin/utils.sh
+        gatherSparkSubmitOpts "$@"
+        nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-submit --class $command \
+          "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}" >> "$log" 2>&1 < /dev/null &
+      else
+        nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
+      fi
+      newpid=$!
+      echo $newpid > $pid
+      sleep 2
+      # Check if the process has died; in that case we'll tail the log so the user can see
+      if ! kill -0 $newpid > /dev/null 2>&1; then
+        echo "failed to launch $command:"
+        tail -2 "$log" | sed 's/^/  /'
+        echo "full log in $log"
+      fi
+    else # run in foreground
+      echo starting $command, logging to stdout
+      if [ $option == spark-submit ]; then
+        source "$SPARK_HOME"/bin/utils.sh
+        gatherSparkSubmitOpts "$@"
+        nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-submit --class $command \
+          "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}" 2>&1 < /dev/null
+      else
+        nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-class $command "$@" 2>&1 < /dev/null
+      fi
     fi
     ;;
 
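The two branches differ only in how the launched JVM is supervised: the background path wraps the command in nohup, stores $! in the pid file, and probes it with kill -0 after two seconds so a startup crash is surfaced from the log, while the foreground path runs the identical command attached to the terminal. A stripped-down sketch of that daemonize-or-foreground pattern, with a placeholder command and paths that are not part of the script:

    #!/usr/bin/env bash
    # Illustrative only; CMD, PIDFILE and LOG are made-up placeholders.
    CMD="sleep 300"                       # stands in for the spark-class/spark-submit launch
    PIDFILE=/tmp/example.pid
    LOG=/tmp/example.out

    if [ "$1" = "--foreground" ]; then
      $CMD 2>&1 < /dev/null               # stay attached; output goes to stdout
    else
      nohup $CMD >> "$LOG" 2>&1 < /dev/null &
      echo $! > "$PIDFILE"
      sleep 2
      # if the child already died, show the tail of its log
      if ! kill -0 "$(cat "$PIDFILE")" > /dev/null 2>&1; then
        echo "failed to launch: $CMD"
        tail -2 "$LOG" | sed 's/^/  /'
      fi
    fi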