Commit 98e7ab9

review feedback

1 parent f53b148

File tree

2 files changed: +30 -25 lines changed


R/pkg/R/SQLContext.R

Lines changed: 9 additions & 1 deletion
@@ -38,7 +38,15 @@ getInternalType <- function(x) {
 }

 #' Temporary function to reroute old S3 Method call to new
-#' We need to check the class of x to ensure it is SQLContext before dispatching
+#' This function is specifically implemented to remove SQLContext from the parameter list.
+#' It determines the target to route the call to by checking the parent of this callsite (say 'func').
+#' The target should be called 'func.default'.
+#' We need to check the class of x to ensure it is SQLContext/HiveContext before dispatching.
+#' @param newFuncSig name of the function the user should call instead in the deprecation message
+#' @param x the first parameter of the original call
+#' @param ... the rest of the parameters to pass along
+#' @return whatever the target returns
+#' @noRd
 dispatchFunc <- function(newFuncSig, x, ...) {
   funcName <- as.character(sys.call(sys.parent())[[1]])
   f <- get(paste0(funcName, ".default"))
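For readers unfamiliar with the trick, here is a minimal standalone sketch of the call-site introspection that dispatchFunc relies on; the names dispatchDemo, greet, and greet.default are hypothetical and exist only for illustration:

# A stripped-down stand-in for dispatchFunc: route a call made to `func`
# over to `func.default` by inspecting the caller's own call expression.
dispatchDemo <- function(x, ...) {
  # sys.parent() is the frame of the function that called dispatchDemo (say 'func');
  # sys.call(sys.parent()) is the call that created that frame, e.g. greet("world"),
  # so [[1]] recovers the symbol `greet`, and we look up `greet.default`.
  funcName <- as.character(sys.call(sys.parent())[[1]])
  f <- get(paste0(funcName, ".default"))
  f(x, ...)
}

greet.default <- function(name) paste("hello,", name)
greet <- function(x, ...) dispatchDemo(x, ...)

greet("world")  # routed to greet.default: "hello, world"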

R/pkg/inst/tests/testthat/test_sparkSQL.R

Lines changed: 21 additions & 24 deletions
@@ -32,6 +32,21 @@ markUtf8 <- function(s) {
   s
 }

+setHiveContext <- function() {
+  hiveCtx <- tryCatch({
+    newJObject("org.apache.spark.sql.hive.test.TestHiveContext", ssc)
+  },
+  error = function(err) {
+    skip("Hive is not built with SparkSQL, skipped")
+  })
+  assign(".sparkRHivesc", hiveCtx, envir = .sparkREnv)
+  hiveCtx
+}
+
+unsetHiveContext <- function() {
+  remove(".sparkRHivesc", envir = .sparkREnv)
+}
+
 # Tests for SparkSQL functions in SparkR

 sc <- sparkR.init()
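A side note on the pattern: the helper pair centralizes the tryCatch/skip boilerplate and treats .sparkREnv as mutable package state via assign/remove. A tiny self-contained sketch of that state pattern, with a made-up .demoEnv standing in for .sparkREnv:

# An environment used as mutable state, like SparkR's internal .sparkREnv.
.demoEnv <- new.env()

assign(".sparkRHivesc", "dummy-hive-context", envir = .demoEnv)
exists(".sparkRHivesc", envir = .demoEnv, inherits = FALSE)  # TRUE while set
remove(".sparkRHivesc", envir = .demoEnv)
exists(".sparkRHivesc", envir = .demoEnv, inherits = FALSE)  # FALSE after cleanup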
@@ -163,20 +178,15 @@ test_that("create DataFrame from RDD", {
                list(name = "John", age = 19L, height = 176.5))

   ssc <- callJMethod(sc, "sc")
-  hiveCtx <- tryCatch({
-    newJObject("org.apache.spark.sql.hive.test.TestHiveContext", ssc)
-  },
-  error = function(err) {
-    skip("Hive is not build with SparkSQL, skipped")
-  })
-  assign(".sparkRHivesc", hiveCtx, envir = .sparkREnv)
+  setHiveContext()
   sql("CREATE TABLE people (name string, age double, height float)")
   df <- read.df(jsonPathNa, "json", schema)
   invisible(insertInto(df, "people"))
   expect_equal(collect(sql("SELECT age from people WHERE name = 'Bob'"))$age,
                c(16))
   expect_equal(collect(sql("SELECT height from people WHERE name ='Bob'"))$height,
                c(176.5))
+  unsetHiveContext()
   remove(".sparkRHivesc", envir = .sparkREnv)
 })

@@ -955,13 +965,7 @@ test_that("column calculation", {

 test_that("test HiveContext", {
   ssc <- callJMethod(sc, "sc")
-  hiveCtx <- tryCatch({
-    newJObject("org.apache.spark.sql.hive.test.TestHiveContext", ssc)
-  },
-  error = function(err) {
-    skip("Hive is not build with SparkSQL, skipped")
-  })
-  assign(".sparkRHivesc", hiveCtx, envir = .sparkREnv)
+  setHiveContext()
   df <- createExternalTable("json", jsonPath, "json")
   expect_is(df, "SparkDataFrame")
   expect_equal(count(df), 3)
@@ -989,7 +993,7 @@ test_that("test HiveContext", {
   expect_is(df5, "SparkDataFrame")
   expect_equal(count(df5), 3)
   unlink(parquetDataPath)
-  remove(".sparkRHivesc", envir = .sparkREnv)
+  unsetHiveContext()
 })

 test_that("column operators", {
@@ -2138,14 +2142,7 @@ test_that("repartition by columns on DataFrame", {

 test_that("Window functions on a DataFrame", {
   ssc <- callJMethod(sc, "sc")
-  hiveCtx <- tryCatch({
-    newJObject("org.apache.spark.sql.hive.test.TestHiveContext", ssc)
-  },
-  error = function(err) {
-    skip("Hive is not build with SparkSQL, skipped")
-  })
-
-  assign(".sparkRHivesc", hiveCtx, envir = .sparkREnv)
+  setHiveContext()
   df <- createDataFrame(list(list(1L, "1"), list(2L, "2"), list(1L, "1"), list(2L, "2")),
                         schema = c("key", "value"))
   ws <- orderBy(window.partitionBy("key"), "value")
@@ -2170,7 +2167,7 @@ test_that("Window functions on a DataFrame", {
   result <- collect(select(df, over(lead("key", 1), ws), over(lead("value", 1), ws)))
   names(result) <- c("key", "value")
   expect_equal(result, expected)
-  remove(".sparkRHivesc", envir = .sparkREnv)
+  unsetHiveContext()
 })

 test_that("createDataFrame sqlContext parameter backward compatibility", {
