
Commit 0647c6b

Allow spark-daemon.sh to support foreground operation
1 parent 1ed5708 · commit 0647c6b


sbin/spark-daemon.sh

Lines changed: 51 additions & 29 deletions
@@ -29,7 +29,7 @@
 # SPARK_NICENESS  The scheduling priority for daemons. Defaults to 0.
 ##

-usage="Usage: spark-daemon.sh [--config <conf-dir>] (start|stop) <spark-command> <spark-instance-number> <args...>"
+usage="Usage: spark-daemon.sh [--foreground] [--config <conf-dir>] (start|stop) <spark-command> <spark-instance-number> <args...>"

 # if no args specified, show usage
 if [ $# -le 1 ]; then
@@ -44,6 +44,14 @@ sbin="`cd "$sbin"; pwd`"

 # get arguments

+RUN_IN_FOREGROUND=0
+# Check if --foreground is passed as an argument. It is an optional parameter.
+if [ "$1" == "--foreground" ]
+then
+  shift
+  RUN_IN_FOREGROUND="1"
+fi
+
 # Check if --config is passed as an argument. It is an optional parameter.
 # Exit if the argument is not a directory.

@@ -95,17 +103,19 @@ fi

 export SPARK_PRINT_LAUNCH_COMMAND="1"

-# get log directory
-if [ "$SPARK_LOG_DIR" = "" ]; then
-  export SPARK_LOG_DIR="$SPARK_HOME/logs"
-fi
-mkdir -p "$SPARK_LOG_DIR"
-touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
-TEST_LOG_DIR=$?
-if [ "${TEST_LOG_DIR}" = "0" ]; then
-  rm -f "$SPARK_LOG_DIR"/.spark_test
-else
-  chown "$SPARK_IDENT_STRING" "$SPARK_LOG_DIR"
+if [ "$RUN_IN_FOREGROUND" = "0" ]; then
+  # get log directory
+  if [ "$SPARK_LOG_DIR" = "" ]; then
+    export SPARK_LOG_DIR="$SPARK_HOME/logs"
+  fi
+  mkdir -p "$SPARK_LOG_DIR"
+  touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
+  TEST_LOG_DIR=$?
+  if [ "${TEST_LOG_DIR}" = "0" ]; then
+    rm -f "$SPARK_LOG_DIR"/.spark_test
+  else
+    chown "$SPARK_IDENT_STRING" "$SPARK_LOG_DIR"
+  fi
 fi

 if [ "$SPARK_PID_DIR" = "" ]; then
@@ -141,24 +151,36 @@ case $option in
       rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $SPARK_MASTER/ "$SPARK_HOME"
     fi

-    spark_rotate_log "$log"
-    echo "starting $command, logging to $log"
-    if [ $option == spark-submit ]; then
-      source "$SPARK_HOME"/bin/utils.sh
-      gatherSparkSubmitOpts "$@"
-      nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-submit --class $command \
-        "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}" >> "$log" 2>&1 < /dev/null &
+    if [ "$RUN_IN_FOREGROUND" = "0" ]; then
+      spark_rotate_log "$log"
+      echo starting $command, logging to $log
+      if [ $option == spark-submit ]; then
+        source "$SPARK_HOME"/bin/utils.sh
+        gatherSparkSubmitOpts "$@"
+        nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-submit --class $command \
+          "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}" >> "$log" 2>&1 < /dev/null &
+      else
+        nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
+      fi
+      newpid=$!
+      echo $newpid > $pid
+      sleep 2
+      # Check if the process has died; in that case we'll tail the log so the user can see
+      if ! kill -0 $newpid >/dev/null 2>&1; then
+        echo "failed to launch $command:"
+        tail -2 "$log" | sed 's/^/  /'
+        echo "full log in $log"
+      fi
     else
-      nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
-    fi
-    newpid=$!
-    echo $newpid > $pid
-    sleep 2
-    # Check if the process has died; in that case we'll tail the log so the user can see
-    if [[ ! $(ps -p "$newpid" -o args=) =~ $command ]]; then
-      echo "failed to launch $command:"
-      tail -2 "$log" | sed 's/^/  /'
-      echo "full log in $log"
+      echo starting $command, logging to stdout
+      if [ $option == spark-submit ]; then
+        source "$SPARK_HOME"/bin/utils.sh
+        gatherSparkSubmitOpts "$@"
+        nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-submit --class $command \
+          "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}" 2>&1 < /dev/null
+      else
+        nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-class $command "$@" 2>&1 < /dev/null
+      fi
     fi
     ;;
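
Per the updated usage string, --foreground is passed ahead of the other options. A minimal invocation sketch; the daemon class and instance number below are illustrative examples, not part of this commit:

# Foreground: per this change, spark_rotate_log is skipped, no pid file is
# written, and output stays on stdout, which suits process supervisors or
# container entrypoints.
./sbin/spark-daemon.sh --foreground start org.apache.spark.deploy.master.Master 1

# Default (background) behaviour is unchanged: the command is started with
# nohup and its output is appended to a log under $SPARK_LOG_DIR.
./sbin/spark-daemon.sh start org.apache.spark.deploy.master.Master 1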
