From 8da7cbfb35738e2afb970870dc9dc6bcffb0444b Mon Sep 17 00:00:00 2001
From: Brennon York
Date: Thu, 27 Nov 2014 12:04:41 -0500
Subject: [PATCH 1/6] fixes SPARK-4298

---
 .../org/apache/spark/deploy/SparkSubmitArguments.scala | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index f0e9ee67f6a67..3a56b1c988edc 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -17,6 +17,7 @@
 package org.apache.spark.deploy
 
+import java.net.URI
 import java.util.jar.JarFile
 
 import scala.collection.mutable.{ArrayBuffer, HashMap}
@@ -124,12 +125,13 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
     // Try to set main class from JAR if no --class argument is given
     if (mainClass == null && !isPython && primaryResource != null) {
       try {
-        val jar = new JarFile(primaryResource)
+        val jar = new JarFile(new URI(primaryResource).getPath)
         // Note that this might still return null if no main-class is set; we catch that later
         mainClass = jar.getManifest.getMainAttributes.getValue("Main-Class")
       } catch {
         case e: Exception =>
-          SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource)
+          SparkSubmit.printErrorAndExit("Cannot main: " + primaryResource)
+          //SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource)
           return
       }
     }

From a0430390be4fee79feae219953e8b3181e6a31a6 Mon Sep 17 00:00:00 2001
From: Brennon York
Date: Tue, 2 Dec 2014 14:40:58 -0500
Subject: [PATCH 2/6] updated to split the uri and jar vals

---
 .../scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 3a56b1c988edc..41ff434caf3e9 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -125,7 +125,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
     // Try to set main class from JAR if no --class argument is given
     if (mainClass == null && !isPython && primaryResource != null) {
       try {
-        val jar = new JarFile(new URI(primaryResource).getPath)
+        val uri = new URI(primaryResource)
+        val jar = new JarFile(uri.getPath)
         // Note that this might still return null if no main-class is set; we catch that later
         mainClass = jar.getManifest.getMainAttributes.getValue("Main-Class")
       } catch {

From 8d20936158eef737ed73c7639eb382fd3dab2d2c Mon Sep 17 00:00:00 2001
From: Brennon York
Date: Tue, 2 Dec 2014 14:45:01 -0500
Subject: [PATCH 3/6] updated to reset the error message back to the default

---
 .../scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 41ff434caf3e9..cd29db79fc13c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -131,8 +131,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
         mainClass = jar.getManifest.getMainAttributes.getValue("Main-Class")
       } catch {
         case e: Exception =>
-          SparkSubmit.printErrorAndExit("Cannot main: " + primaryResource)
-          //SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource)
+          SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource)
           return
       }
     }

From c6dad689d6e27a8a8c09c279441013dc179487ec Mon Sep 17 00:00:00 2001
From: Brennon York
Date: Thu, 11 Dec 2014 13:30:40 -0800
Subject: [PATCH 4/6] Set case statement to support multiple jar URI's and
 enabled the 'file' URI to load the main-class

---
 .../spark/deploy/SparkSubmitArguments.scala | 26 +++++++++++++------
 1 file changed, 18 insertions(+), 8 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index cd29db79fc13c..d83ccc384b932 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -124,15 +124,25 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
 
     // Try to set main class from JAR if no --class argument is given
     if (mainClass == null && !isPython && primaryResource != null) {
-      try {
-        val uri = new URI(primaryResource)
-        val jar = new JarFile(uri.getPath)
-        // Note that this might still return null if no main-class is set; we catch that later
-        mainClass = jar.getManifest.getMainAttributes.getValue("Main-Class")
-      } catch {
-        case e: Exception =>
-          SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource)
+      val uri = new URI(primaryResource)
+      val uriScheme = uri.getScheme()
+      // Note that this might still return null if no main-class is set; we catch that later
+      mainClass = uriScheme match {
+        case "file" => {
+          try {
+            val jar = new JarFile(uri.getPath)
+            jar.getManifest.getMainAttributes.getValue("Main-Class")
+          } catch {
+            case e: Exception =>
+              SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource)
+              return
+          }
+        }
+        case _ => {
+          SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource +
+            " with URI: " + uriScheme)
           return
+        }
       }
     }
 

From 14daa202030f8da4908b7dbce46b94cd8116d2c4 Mon Sep 17 00:00:00 2001
From: Brennon York
Date: Thu, 11 Dec 2014 14:13:47 -0800
Subject: [PATCH 5/6] pushed mainClass assignment into match statement, removed
 spurious spaces, removed { } from case statements, removed return values

---
 .../apache/spark/deploy/SparkSubmitArguments.scala | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index d83ccc384b932..c61aa91ef7b92 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -127,22 +127,18 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
       val uri = new URI(primaryResource)
       val uriScheme = uri.getScheme()
       // Note that this might still return null if no main-class is set; we catch that later
-      mainClass = uriScheme match {
-        case "file" => {
+      uriScheme match {
+        case "file" =>
           try {
             val jar = new JarFile(uri.getPath)
-            jar.getManifest.getMainAttributes.getValue("Main-Class")
+            mainClass = jar.getManifest.getMainAttributes.getValue("Main-Class")
           } catch {
             case e: Exception =>
               SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource)
-              return
           }
-        }
-        case _ => {
+        case _ =>
           SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource +
             " with URI: " + uriScheme)
-          return
-        }
       }
     }
 

From 5e0fce1cd0a6b4b413c31e8ca214c11c569c6164 Mon Sep 17 00:00:00 2001
From: Brennon York
Date: Wed, 31 Dec 2014 09:45:04 -0800
Subject: [PATCH 6/6] Use string interpolation for error messages, moved
 comment line from original code to above its necessary code segment

---
 .../org/apache/spark/deploy/SparkSubmitArguments.scala | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index c61aa91ef7b92..73a2dd9e9d386 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -126,19 +126,21 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
     if (mainClass == null && !isPython && primaryResource != null) {
       val uri = new URI(primaryResource)
       val uriScheme = uri.getScheme()
-      // Note that this might still return null if no main-class is set; we catch that later
+
       uriScheme match {
         case "file" =>
           try {
             val jar = new JarFile(uri.getPath)
+            // Note that this might still return null if no main-class is set; we catch that later
            mainClass = jar.getManifest.getMainAttributes.getValue("Main-Class")
           } catch {
             case e: Exception =>
-              SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource)
+              SparkSubmit.printErrorAndExit(s"Cannot load main class from JAR $primaryResource")
           }
         case _ =>
-          SparkSubmit.printErrorAndExit("Cannot load main class from JAR: " + primaryResource +
-            " with URI: " + uriScheme)
+          SparkSubmit.printErrorAndExit(
+            s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " +
+            "Please specify a class through --class.")
       }
     }
 
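
For reference, below is a minimal, standalone Scala sketch of the behavior this series converges on: parse the primary resource as a URI, and only when the scheme is "file" open the jar locally and read its Main-Class manifest attribute; other schemes are rejected because java.util.jar.JarFile can only open local paths. The object name MainClassFromJar and the helper resolveMainClass are illustrative only (not part of Spark), and the sketch returns an Option instead of calling SparkSubmit.printErrorAndExit.

import java.net.URI
import java.util.jar.JarFile

object MainClassFromJar {
  // Returns the jar manifest's Main-Class for a local "file" URI, or None if the
  // scheme is unsupported, the jar cannot be opened, or no Main-Class is set.
  def resolveMainClass(primaryResource: String): Option[String] = {
    val uri = new URI(primaryResource)
    uri.getScheme match {
      case "file" =>
        try {
          val jar = new JarFile(uri.getPath)
          try {
            // getValue returns null when the manifest has no Main-Class attribute
            Option(jar.getManifest.getMainAttributes.getValue("Main-Class"))
          } finally {
            jar.close()
          }
        } catch {
          case _: Exception => None
        }
      case _ =>
        // Remote schemes (hdfs, http, ...) cannot be opened directly with JarFile
        None
    }
  }

  def main(args: Array[String]): Unit = {
    // Example: prints Some(<main class>) for a local jar with a Main-Class entry
    println(resolveMainClass("file:/tmp/example.jar"))
  }
}

Returning an Option keeps the sketch side-effect free; SparkSubmitArguments itself reports the error and exits instead, as shown in the patches above.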