Skip to content

Commit 13a2fc5

Browse files
committed
Merge remote-tracking branch 'upstream/master' into interval_add_subtract
Conflicts: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala and sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala
2 parents 83ec129 + c6b1a9e commit 13a2fc5

File tree

136 files changed

+1613
-708
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

136 files changed

+1613
-708
lines changed

R/pkg/R/DataFrame.R

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1328,7 +1328,7 @@ setMethod("write.df",
13281328
jmode <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "saveMode", mode)
13291329
options <- varargsToEnv(...)
13301330
if (!is.null(path)) {
1331-
options[['path']] = path
1331+
options[['path']] <- path
13321332
}
13331333
callJMethod(df@sdf, "save", source, jmode, options)
13341334
})

R/pkg/R/client.R

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,9 +36,9 @@ connectBackend <- function(hostname, port, timeout = 6000) {
3636

3737
determineSparkSubmitBin <- function() {
3838
if (.Platform$OS.type == "unix") {
39-
sparkSubmitBinName = "spark-submit"
39+
sparkSubmitBinName <- "spark-submit"
4040
} else {
41-
sparkSubmitBinName = "spark-submit.cmd"
41+
sparkSubmitBinName <- "spark-submit.cmd"
4242
}
4343
sparkSubmitBinName
4444
}

R/pkg/R/group.R

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,7 @@ setMethod("count",
8787
setMethod("agg",
8888
signature(x = "GroupedData"),
8989
function(x, ...) {
90-
cols = list(...)
90+
cols <- list(...)
9191
stopifnot(length(cols) > 0)
9292
if (is.character(cols[[1]])) {
9393
cols <- varargsToEnv(...)
@@ -97,7 +97,7 @@ setMethod("agg",
9797
if (!is.null(ns)) {
9898
for (n in ns) {
9999
if (n != "") {
100-
cols[[n]] = alias(cols[[n]], n)
100+
cols[[n]] <- alias(cols[[n]], n)
101101
}
102102
}
103103
}

R/pkg/R/utils.R

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,8 +41,8 @@ convertJListToRList <- function(jList, flatten, logicalUpperBound = NULL,
4141
if (isInstanceOf(obj, "scala.Tuple2")) {
4242
# JavaPairRDD[Array[Byte], Array[Byte]].
4343

44-
keyBytes = callJMethod(obj, "_1")
45-
valBytes = callJMethod(obj, "_2")
44+
keyBytes <- callJMethod(obj, "_1")
45+
valBytes <- callJMethod(obj, "_2")
4646
res <- list(unserialize(keyBytes),
4747
unserialize(valBytes))
4848
} else {

R/pkg/inst/tests/test_binaryFile.R

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ context("functions on binary files")
2020
# JavaSparkContext handle
2121
sc <- sparkR.init()
2222

23-
mockFile = c("Spark is pretty.", "Spark is awesome.")
23+
mockFile <- c("Spark is pretty.", "Spark is awesome.")
2424

2525
test_that("saveAsObjectFile()/objectFile() following textFile() works", {
2626
fileName1 <- tempfile(pattern="spark-test", fileext=".tmp")

R/pkg/inst/tests/test_binary_function.R

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ test_that("zipPartitions() on RDDs", {
7676
expect_equal(actual,
7777
list(list(1, c(1,2), c(1,2,3)), list(2, c(3,4), c(4,5,6))))
7878

79-
mockFile = c("Spark is pretty.", "Spark is awesome.")
79+
mockFile <- c("Spark is pretty.", "Spark is awesome.")
8080
fileName <- tempfile(pattern="spark-test", fileext=".tmp")
8181
writeLines(mockFile, fileName)
8282

R/pkg/inst/tests/test_rdd.R

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -447,7 +447,7 @@ test_that("zipRDD() on RDDs", {
447447
expect_equal(actual,
448448
list(list(0, 1000), list(1, 1001), list(2, 1002), list(3, 1003), list(4, 1004)))
449449

450-
mockFile = c("Spark is pretty.", "Spark is awesome.")
450+
mockFile <- c("Spark is pretty.", "Spark is awesome.")
451451
fileName <- tempfile(pattern="spark-test", fileext=".tmp")
452452
writeLines(mockFile, fileName)
453453

@@ -483,7 +483,7 @@ test_that("cartesian() on RDDs", {
483483
actual <- collect(cartesian(rdd, emptyRdd))
484484
expect_equal(actual, list())
485485

486-
mockFile = c("Spark is pretty.", "Spark is awesome.")
486+
mockFile <- c("Spark is pretty.", "Spark is awesome.")
487487
fileName <- tempfile(pattern="spark-test", fileext=".tmp")
488488
writeLines(mockFile, fileName)
489489

R/pkg/inst/tests/test_textFile.R

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ context("the textFile() function")
2020
# JavaSparkContext handle
2121
sc <- sparkR.init()
2222

23-
mockFile = c("Spark is pretty.", "Spark is awesome.")
23+
mockFile <- c("Spark is pretty.", "Spark is awesome.")
2424

2525
test_that("textFile() on a local file returns an RDD", {
2626
fileName <- tempfile(pattern="spark-test", fileext=".tmp")

R/pkg/inst/tests/test_utils.R

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -119,7 +119,7 @@ test_that("cleanClosure on R functions", {
119119
# Test for overriding variables in base namespace (Issue: SparkR-196).
120120
nums <- as.list(1:10)
121121
rdd <- parallelize(sc, nums, 2L)
122-
t = 4 # Override base::t in .GlobalEnv.
122+
t <- 4 # Override base::t in .GlobalEnv.
123123
f <- function(x) { x > t }
124124
newF <- cleanClosure(f)
125125
env <- environment(newF)

build/mvn

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -112,10 +112,17 @@ install_scala() {
112112
# the environment
113113
ZINC_PORT=${ZINC_PORT:-"3030"}
114114

115+
# Check for the `--force` flag dictating that `mvn` should be downloaded
116+
# regardless of whether the system already has a `mvn` install
117+
if [ "$1" == "--force" ]; then
118+
FORCE_MVN=1
119+
shift
120+
fi
121+
115122
# Install Maven if necessary
116123
MVN_BIN="$(command -v mvn)"
117124

118-
if [ ! "$MVN_BIN" ]; then
125+
if [ ! "$MVN_BIN" -o -n "$FORCE_MVN" ]; then
119126
install_mvn
120127
fi
121128

@@ -139,5 +146,7 @@ fi
139146
# Set any `mvn` options if not already present
140147
export MAVEN_OPTS=${MAVEN_OPTS:-"$_COMPILE_JVM_OPTS"}
141148

149+
echo "Using \`mvn\` from path: $MVN_BIN"
150+
142151
# Last, call the `mvn` command as usual
143152
${MVN_BIN} "$@"

0 commit comments

Comments (0)