@@ -32,6 +32,25 @@ markUtf8 <- function(s) {
   s
 }
 
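+# Creates a TestHiveContext from the given SparkContext and caches it in
+# .sparkREnv; skips the current test when Spark was not built with Hive.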
+setHiveContext <- function(sc) {
+  ssc <- callJMethod(sc, "sc")
+  hiveCtx <- tryCatch({
+    newJObject("org.apache.spark.sql.hive.test.TestHiveContext", ssc)
+  },
+  error = function(err) {
+    skip("Hive is not built with SparkSQL, skipped")
+  })
+  assign(".sparkRHivesc", hiveCtx, envir = .sparkREnv)
+  hiveCtx
+}
+
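+# Removes the cached Hive context so subsequent tests start from a clean state.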
+unsetHiveContext <- function() {
+  remove(".sparkRHivesc", envir = .sparkREnv)
+}
+
 # Tests for SparkSQL functions in SparkR
 
 sc <- sparkR.init()
@@ -163,20 +182,13 @@ test_that("create DataFrame from RDD", {
                list(name = "John", age = 19L, height = 176.5))
 
-  ssc <- callJMethod(sc, "sc")
-  hiveCtx <- tryCatch({
-    newJObject("org.apache.spark.sql.hive.test.TestHiveContext", ssc)
-  },
-  error = function(err) {
-    skip("Hive is not build with SparkSQL, skipped")
-  })
-  assign(".sparkRHivesc", hiveCtx, envir = .sparkREnv)
+  setHiveContext(sc)
   sql("CREATE TABLE people (name string, age double, height float)")
   df <- read.df(jsonPathNa, "json", schema)
   invisible(insertInto(df, "people"))
   expect_equal(collect(sql("SELECT age from people WHERE name = 'Bob'"))$age,
                c(16))
   expect_equal(collect(sql("SELECT height from people WHERE name ='Bob'"))$height,
                c(176.5))
+  unsetHiveContext()
-  remove(".sparkRHivesc", envir = .sparkREnv)
 })
 
@@ -955,13 +967,6 @@ test_that("column calculation", {
 
 test_that("test HiveContext", {
-  ssc <- callJMethod(sc, "sc")
-  hiveCtx <- tryCatch({
-    newJObject("org.apache.spark.sql.hive.test.TestHiveContext", ssc)
-  },
-  error = function(err) {
-    skip("Hive is not build with SparkSQL, skipped")
-  })
-  assign(".sparkRHivesc", hiveCtx, envir = .sparkREnv)
+  setHiveContext(sc)
   df <- createExternalTable("json", jsonPath, "json")
   expect_is(df, "SparkDataFrame")
   expect_equal(count(df), 3)
@@ -989,7 +994,7 @@ test_that("test HiveContext", {
   expect_is(df5, "SparkDataFrame")
   expect_equal(count(df5), 3)
   unlink(parquetDataPath)
-  remove(".sparkRHivesc", envir = .sparkREnv)
+  unsetHiveContext()
 })
 
 test_that("column operators", {
@@ -2138,14 +2143,6 @@ test_that("repartition by columns on DataFrame", {
 
 test_that("Window functions on a DataFrame", {
-  ssc <- callJMethod(sc, "sc")
-  hiveCtx <- tryCatch({
-    newJObject("org.apache.spark.sql.hive.test.TestHiveContext", ssc)
-  },
-  error = function(err) {
-    skip("Hive is not build with SparkSQL, skipped")
-  })
-
-  assign(".sparkRHivesc", hiveCtx, envir = .sparkREnv)
+  setHiveContext(sc)
   df <- createDataFrame(list(list(1L, "1"), list(2L, "2"), list(1L, "1"), list(2L, "2")),
                         schema = c("key", "value"))
   ws <- orderBy(window.partitionBy("key"), "value")
@@ -2170,7 +2167,7 @@ test_that("Window functions on a DataFrame", {
   result <- collect(select(df, over(lead("key", 1), ws), over(lead("value", 1), ws)))
   names(result) <- c("key", "value")
   expect_equal(result, expected)
-  remove(".sparkRHivesc", envir = .sparkREnv)
+  unsetHiveContext()
 })
 
 test_that("createDataFrame sqlContext parameter backward compatibility", {