From 43e0febda5ccdf93335adc867eba789511fbd72a Mon Sep 17 00:00:00 2001
From: Takuya UESHIN
Date: Tue, 25 Feb 2014 04:30:44 +0900
Subject: [PATCH 1/3] Prevent ContextClassLoader of Actor from becoming
 ClassLoader of Executor.

---
 core/src/main/scala/org/apache/spark/executor/Executor.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index 8fe9b848ba145..cc3eb918a8b26 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -112,11 +112,10 @@ private[spark] class Executor(
     }
   }
 
-  // Create our ClassLoader and set it on this thread
+  // Create our ClassLoader
   // do this after SparkEnv creation so can access the SecurityManager
   private val urlClassLoader = createClassLoader()
   private val replClassLoader = addReplClassLoaderIfNeeded(urlClassLoader)
-  Thread.currentThread.setContextClassLoader(replClassLoader)
 
   // Akka's message frame size. If task result is bigger than this, we use the block manager
   // to send the result back.

From c6c09b632708226a26ce985871351b147fc1fa0c Mon Sep 17 00:00:00 2001
From: Takuya UESHIN
Date: Mon, 24 Mar 2014 20:03:54 +0900
Subject: [PATCH 2/3] Add a test to collect objects of class defined in repl.
---
 .../test/scala/org/apache/spark/repl/ReplSuite.scala | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 8203b8f6122e1..4155007c6d337 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -242,4 +242,15 @@ class ReplSuite extends FunSuite {
       assertContains("res4: Array[Int] = Array(0, 0, 0, 0, 0)", output)
     }
   }
+
+  test("collecting objects of class defined in repl") {
+    val output = runInterpreter("local[2]",
+      """
+        |case class Foo(i: Int)
+        |val ret = sc.parallelize((1 to 100).map(Foo), 10).collect
+      """.stripMargin)
+    assertDoesNotContain("error:", output)
+    assertDoesNotContain("Exception", output)
+    assertContains("ret: Array[Foo] = Array(Foo(1),", output)
+  }
 }

From d79e8c062ccea02b93e076e57c67c84d568e7c52 Mon Sep 17 00:00:00 2001
From: Takuya UESHIN
Date: Wed, 26 Mar 2014 15:38:11 +0900
Subject: [PATCH 3/3] Change a parent class loader of ExecutorURLClassLoader.

---
 core/src/main/scala/org/apache/spark/executor/Executor.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index cc3eb918a8b26..13e2e292428b4 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -293,7 +293,7 @@ private[spark] class Executor(
    * created by the interpreter to the search path
    */
   private def createClassLoader(): ExecutorURLClassLoader = {
-    val loader = this.getClass.getClassLoader
+    val loader = Thread.currentThread().getContextClassLoader
 
     // For each of the jars in the jarSet, add them to the class loader.
     // We assume each of the files has already been fetched.