
Commit 3fc27e7

Merge branch 'master' into SPARK-6263
2 parents: 6084e9c + 660c6ce

File tree: 121 files changed (+2064, -1458 lines)


LICENSE

Lines changed: 1 addition & 1 deletion
@@ -948,6 +948,6 @@ The following components are provided under the MIT License. See project link fo
 (MIT License) SLF4J LOG4J-12 Binding (org.slf4j:slf4j-log4j12:1.7.5 - http://www.slf4j.org)
 (MIT License) pyrolite (org.spark-project:pyrolite:2.0.1 - http://pythonhosted.org/Pyro4/)
 (MIT License) scopt (com.github.scopt:scopt_2.10:3.2.0 - https://github.com/scopt/scopt)
-(The MIT License) Mockito (org.mockito:mockito-all:1.8.5 - http://www.mockito.org)
+(The MIT License) Mockito (org.mockito:mockito-core:1.8.5 - http://www.mockito.org)
 (MIT License) jquery (https://jquery.org/license/)
 (MIT License) AnchorJS (https://github.com/bryanbraun/anchorjs)

R/pkg/R/client.R

Lines changed: 1 addition & 1 deletion
@@ -57,7 +57,7 @@ generateSparkSubmitArgs <- function(args, sparkHome, jars, sparkSubmitOpts, pack
 }
 
 launchBackend <- function(args, sparkHome, jars, sparkSubmitOpts, packages) {
-  sparkSubmitBin <- determineSparkSubmitBin()
+  sparkSubmitBinName <- determineSparkSubmitBin()
   if (sparkHome != "") {
     sparkSubmitBin <- file.path(sparkHome, "bin", sparkSubmitBinName)
   } else {
R/pkg/R/sparkR.R

Lines changed: 1 addition & 1 deletion
@@ -132,7 +132,7 @@ sparkR.init <- function(
                          sparkHome = sparkHome,
                          jars = jars,
                          sparkSubmitOpts = Sys.getenv("SPARKR_SUBMIT_ARGS", "sparkr-shell"),
-                         sparkPackages = sparkPackages)
+                         packages = sparkPackages)
   # wait atmost 100 seconds for JVM to launch
   wait <- 0.1
   for (i in 1:25) {
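
The fix aligns the argument name with launchBackend's packages parameter (see R/pkg/R/client.R above). A minimal usage sketch; the package coordinate below is illustrative only:

    library(SparkR)

    # The user-facing argument of sparkR.init is still sparkPackages ...
    sc <- sparkR.init(master = "local[2]",
                      sparkPackages = "com.databricks:spark-csv_2.10:1.0.3")  # illustrative coordinate
    # ... but internally it is now forwarded to launchBackend as packages = sparkPackages,
    # matching the parameter name shown in the client.R hunk above.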
Binary file (2.82 KB) not shown.

R/pkg/inst/tests/jarTest.R

Lines changed: 32 additions & 0 deletions
@@ -0,0 +1,32 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+library(SparkR)
+
+sc <- sparkR.init()
+
+helloTest <- SparkR:::callJStatic("sparkR.test.hello",
+                                  "helloWorld",
+                                  "Dave")
+
+basicFunction <- SparkR:::callJStatic("sparkR.test.basicFunction",
+                                      "addStuff",
+                                      2L,
+                                      2L)
+
+sparkR.stop()
+output <- c(helloTest, basicFunction)
+writeLines(output)

R/pkg/inst/tests/test_includeJAR.R

Lines changed: 37 additions & 0 deletions
@@ -0,0 +1,37 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+context("include an external JAR in SparkContext")
+
+runScript <- function() {
+  sparkHome <- Sys.getenv("SPARK_HOME")
+  jarPath <- paste("--jars",
+                   shQuote(file.path(sparkHome, "R/lib/SparkR/test_support/sparktestjar_2.10-1.0.jar")))
+  scriptPath <- file.path(sparkHome, "R/lib/SparkR/tests/jarTest.R")
+  submitPath <- file.path(sparkHome, "bin/spark-submit")
+  res <- system2(command = submitPath,
+                 args = c(jarPath, scriptPath),
+                 stdout = TRUE)
+  tail(res, 2)
+}
+
+test_that("sparkJars tag in SparkContext", {
+  testOutput <- runScript()
+  helloTest <- testOutput[1]
+  expect_true(helloTest == "Hello, Dave")
+  basicFunction <- testOutput[2]
+  expect_true(basicFunction == 4L)
+})

R/pkg/inst/tests/test_sparkSQL.R

Lines changed: 12 additions & 3 deletions
@@ -19,6 +19,14 @@ library(testthat)
 
 context("SparkSQL functions")
 
+# Utility function for easily checking the values of a StructField
+checkStructField <- function(actual, expectedName, expectedType, expectedNullable) {
+  expect_equal(class(actual), "structField")
+  expect_equal(actual$name(), expectedName)
+  expect_equal(actual$dataType.toString(), expectedType)
+  expect_equal(actual$nullable(), expectedNullable)
+}
+
 # Tests for SparkSQL functions in SparkR
 
 sc <- sparkR.init()

@@ -52,9 +60,10 @@ test_that("infer types", {
                list(type = 'array', elementType = "integer", containsNull = TRUE))
   expect_equal(infer_type(list(1L, 2L)),
                list(type = 'array', elementType = "integer", containsNull = TRUE))
-  expect_equal(infer_type(list(a = 1L, b = "2")),
-               structType(structField(x = "a", type = "integer", nullable = TRUE),
-                          structField(x = "b", type = "string", nullable = TRUE)))
+  testStruct <- infer_type(list(a = 1L, b = "2"))
+  expect_true(class(testStruct) == "structType")
+  checkStructField(testStruct$fields()[[1]], "a", "IntegerType", TRUE)
+  checkStructField(testStruct$fields()[[2]], "b", "StringType", TRUE)
   e <- new.env()
   assign("a", 1L, envir = e)
   expect_equal(infer_type(e),
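
As an aside, the new helper works against a hand-built schema field as well; the field name and type below are made up for the example:

    field <- structField("age", "integer", TRUE)        # SparkR structField(name, type, nullable)
    checkStructField(field, "age", "IntegerType", TRUE)  # "integer" maps to IntegerType, as in the test above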

core/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -354,7 +354,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 0 additions & 1 deletion
@@ -545,7 +545,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
 
     // Post init
     _taskScheduler.postStartHook()
-    _env.metricsSystem.registerSource(new DAGSchedulerSource(dagScheduler))
    _env.metricsSystem.registerSource(new BlockManagerSource(_env.blockManager))
    _executorAllocationManager.foreach { e =>
      _env.metricsSystem.registerSource(e.executorAllocationManagerSource)

core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala

Lines changed: 16 additions & 1 deletion
@@ -88,6 +88,21 @@ private[r] class RBackendHandler(server: RBackend)
       ctx.close()
   }
 
+  // Looks up a class given a class name. This function first checks the
+  // current class loader and if a class is not found, it looks up the class
+  // in the context class loader. Address [SPARK-5185]
+  def getStaticClass(objId: String): Class[_] = {
+    try {
+      val clsCurrent = Class.forName(objId)
+      clsCurrent
+    } catch {
+      // Use contextLoader if we can't find the JAR in the system class loader
+      case e: ClassNotFoundException =>
+        val clsContext = Class.forName(objId, true, Thread.currentThread().getContextClassLoader)
+        clsContext
+    }
+  }
+
   def handleMethodCall(
       isStatic: Boolean,
       objId: String,

@@ -98,7 +113,7 @@ private[r] class RBackendHandler(server: RBackend)
     var obj: Object = null
     try {
       val cls = if (isStatic) {
-        Class.forName(objId)
+        getStaticClass(objId)
       } else {
         JVMObjectTracker.get(objId) match {
           case None => throw new IllegalArgumentException("Object not found " + objId)
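
For context, this fallback is what lets a SparkR static call reach a class that exists only in a user-supplied JAR (added via --jars). The snippet below mirrors the new jarTest.R above rather than introducing any new API:

    # "sparkR.test.hello" lives only in the test JAR passed with --jars, so it is not
    # visible to the class loader that Class.forName(objId) consults by default;
    # getStaticClass falls back to the context class loader and the call succeeds.
    greeting <- SparkR:::callJStatic("sparkR.test.hello", "helloWorld", "Dave")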
