From feaf86de1500a50792181cd12f9e644f54e6b452 Mon Sep 17 00:00:00 2001
From: Mark Hamstra
Date: Wed, 26 Mar 2014 12:24:09 -0700
Subject: [PATCH 1/2] Trimmed down deb package

---
 assembly/pom.xml | 42 ------------------------------------------
 1 file changed, 42 deletions(-)

diff --git a/assembly/pom.xml b/assembly/pom.xml
index f87151d7f0a5f..eec798175580e 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -68,31 +68,11 @@
       <artifactId>spark-core_2.9.3</artifactId>
       <version>${project.version}</version>
     </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-bagel_2.9.3</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-mllib_2.9.3</artifactId>
-      <version>${project.version}</version>
-    </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-repl_2.9.3</artifactId>
       <version>${project.version}</version>
     </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-streaming_2.9.3</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>net.sf.py4j</groupId>
-      <artifactId>py4j</artifactId>
-      <version>0.7</version>
-    </dependency>
   </dependencies>
@@ -274,17 +254,6 @@
                 <filemode>744</filemode>
               </mapper>
             </data>
-            <data>
-              <src>${basedir}/../pyspark</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <user>${deb.user}</user>
-                <group>${deb.user}</group>
-                <prefix>${deb.install.path}</prefix>
-                <filemode>744</filemode>
-              </mapper>
-            </data>
             <data>
               <src>${basedir}/src/deb/RELEASE</src>
               <type>file</type>
               <mapper>
                 <type>perm</type>
                 <user>${deb.user}</user>
                 <group>${deb.user}</group>
                 <prefix>${deb.install.path}</prefix>
                 <filemode>744</filemode>
               </mapper>
             </data>
@@ -317,17 +286,6 @@
                 <filemode>744</filemode>
               </mapper>
             </data>
-            <data>
-              <src>${basedir}/../python</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <user>${deb.user}</user>
-                <group>${deb.user}</group>
-                <prefix>${deb.install.path}/python</prefix>
-                <filemode>744</filemode>
-              </mapper>
-            </data>

From 53bccecb096c8acecd62d41525d4bb1af953c75f Mon Sep 17 00:00:00 2001
From: Aaron Davidson
Date: Mon, 19 May 2014 20:55:26 -0700
Subject: [PATCH 2/2] SPARK-1689: Spark application should die when removed by Master

scheduler.error() will mask the error if there are active tasks. Being
removed is a cataclysmic event for Spark applications, and should
probably be treated as such.

Author: Aaron Davidson

Closes #832 from aarondav/i-love-u and squashes the following commits:

9f1200f [Aaron Davidson] SPARK-1689: Spark application should die when removed by Master

Conflicts:
	core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
---
 .../spark/scheduler/cluster/SparkDeploySchedulerBackend.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index cefa970bb92f9..19ce492807715 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -79,6 +79,8 @@ private[spark] class SparkDeploySchedulerBackend(
     if (!stopping) {
       logError("Spark cluster looks dead, giving up.")
       scheduler.error("Spark cluster looks down")
+      // Ensure the application terminates, as we can no longer run jobs.
+      sc.stop()
     }
   }
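
Note on the second patch: the commit message explains that scheduler.error() can be masked while tasks are still active, which is why the hunk adds an explicit sc.stop(). Below is a self-contained toy sketch in plain Scala, not Spark code; the names ToyScheduler, ToyContext, and onRemovedByMaster are invented for illustration. It only models the failure mode described in the commit message and the effect of the added stop() call.

// Toy sketch only (not Spark code). It models the behavior described in the
// SPARK-1689 commit message: an error reported to the scheduler can be masked
// while tasks are active, so the application must be stopped explicitly once
// the Master has removed it.
object Spark1689Sketch {
  final class ToyScheduler(var activeTasks: Int) {
    def error(message: String): Unit = {
      if (activeTasks == 0) {
        throw new IllegalStateException(message) // error actually surfaces
      } else {
        // The masking case: the error is swallowed and the app keeps running.
        println(s"error '$message' masked: $activeTasks task(s) still active")
      }
    }
  }

  final class ToyContext {
    @volatile var stopped = false
    def stop(): Unit = { stopped = true; println("application stopped") }
  }

  // Rough analogue of the patched code path: report the error, then make sure
  // the application terminates regardless of whether the error was masked.
  def onRemovedByMaster(scheduler: ToyScheduler, sc: ToyContext, stopping: Boolean): Unit = {
    if (!stopping) {
      println("Spark cluster looks dead, giving up.")
      scheduler.error("Spark cluster looks down")
      sc.stop() // the line the patch adds: die even with active tasks
    }
  }

  def main(args: Array[String]): Unit = {
    val scheduler = new ToyScheduler(activeTasks = 3)
    val sc = new ToyContext
    onRemovedByMaster(scheduler, sc, stopping = false)
    assert(sc.stopped) // without the explicit stop, the app would linger
  }
}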