File tree Expand file tree Collapse file tree 3 files changed +20
-14
lines changed Expand file tree Collapse file tree 3 files changed +20
-14
lines changed Original file line number Diff line number Diff line change @@ -364,12 +364,6 @@ sparkR.session <- function(
364364
365365 sparkConfigMap <- convertNamedListToEnv(sparkConfig )
366366
367- # NOTE(shivaram): Set default warehouse dir to tmpdir to meet CRAN requirements
368- # See SPARK-18817 for more details
369- if (! exists(" spark.sql.default.warehouse.dir" , envir = sparkConfigMap )) {
370- assign(" spark.sql.default.warehouse.dir" , tempdir(), envir = sparkConfigMap )
371- }
372-
373367 namedParams <- list (... )
374368 if (length(namedParams ) > 0 ) {
375369 paramMap <- convertNamedListToEnv(namedParams )
@@ -383,6 +377,12 @@ sparkR.session <- function(
383377 overrideEnvs(sparkConfigMap , paramMap )
384378 }
385379
380+ # NOTE(shivaram): Set default warehouse dir to tmpdir to meet CRAN requirements
381+ # See SPARK-18817 for more details
382+ if (! exists(" spark.sql.default.warehouse.dir" , envir = sparkConfigMap )) {
383+ assign(" spark.sql.default.warehouse.dir" , tempdir(), envir = sparkConfigMap )
384+ }
385+
386386 deployMode <- " "
387387 if (exists(" spark.submit.deployMode" , envir = sparkConfigMap )) {
388388 deployMode <- sparkConfigMap [[" spark.submit.deployMode" ]]
Original file line number Diff line number Diff line change @@ -72,6 +72,20 @@ test_that("repeatedly starting and stopping SparkSession", {
7272 }
7373})
7474
75+ test_that(" Default warehouse dir should be set to tempdir" , {
76+ sparkR.session.stop()
77+ sparkR.session(enableHiveSupport = FALSE )
78+
79+ # Create a table (later dropped) to trigger warehouse directory creation
80+ sql(" CREATE TABLE people_warehouse_test" )
81+ # spark-warehouse should be written only to tempdir() and not to the current working directory
82+ res <- list.files(path = " ." , pattern = " .*spark-warehouse.*" ,
83+ recursive = TRUE , include.dirs = TRUE )
84+ expect_equal(length(res ), 0 )
85+ result <- sql(" DROP TABLE people_warehouse_test" )
86+ sparkR.session.stop()
87+ })
88+
7589test_that(" rdd GC across sparkR.stop" , {
7690 sc <- sparkR.sparkContext() # sc should get id 0
7791 rdd1 <- parallelize(sc , 1 : 20 , 2L ) # rdd1 should get id 1
Original file line number Diff line number Diff line change @@ -2165,14 +2165,6 @@ test_that("SQL error message is returned from JVM", {
21652165 expect_equal(grepl(" blah" , retError ), TRUE )
21662166})
21672167
2168- test_that(" Default warehouse dir should be set to tempdir" , {
2169- # nothing should be written outside tempdir() without explicit user permission
2170- inital_working_directory_files <- list.files()
2171- result <- sql(" CREATE TABLE warehouse" )
2172- expect_equal(inital_working_directory_files , list.files())
2173- result <- sql(" DROP TABLE warehouse" )
2174- })
2175-
21762168irisDF <- suppressWarnings(createDataFrame(iris ))
21772169
21782170test_that(" Method as.data.frame as a synonym for collect()" , {
You can’t perform that action at this time.
0 commit comments