@@ -152,6 +152,11 @@ install.spark <- function(hadoopVersion = "2.7", mirrorUrl = NULL,
   })
   if (!tarExists || overwrite || !success) {
     unlink(packageLocalPath)
+    if (success) {
+      # if the tar file was not there before (or it was, but we were told to overwrite it)
+      # and untarring succeeded, set a flag that we downloaded (and untarred) the Spark package.
+      assign(".sparkDownloaded", TRUE, envir = .sparkREnv)
+    }
   }
   if (!success) stop("Extract archive failed.")
   message("DONE.")
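
Note: .sparkDownloaded is kept in SparkR's internal package environment, so a later cleanup step can tell whether this session actually downloaded Spark. As a minimal sketch, reading the flag back with base R could look like the following (assuming the getOne() helper used further below wraps mget() in roughly this way):

    # returns FALSE when the flag was never set in this session
    wasDownloaded <- mget(".sparkDownloaded", envir = .sparkREnv,
                          ifnotfound = FALSE)[[1]]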
@@ -266,6 +271,7 @@ hadoopVersionName <- function(hadoopVersion) {
 
 # The implementation refers to the appdirs package: https://pypi.python.org/pypi/appdirs
 # and adapts it to the Spark context
+# see also sparkCacheRelPathLength()
 sparkCachePath <- function() {
   if (is_windows()) {
     winAppPath <- Sys.getenv("LOCALAPPDATA", unset = NA)
@@ -282,7 +288,7 @@ sparkCachePath <- function() {
     }
   } else if (.Platform$OS.type == "unix") {
     if (Sys.info()["sysname"] == "Darwin") {
-      path <- file.path(Sys.getenv("HOME"), "Library/Caches", "spark")
+      path <- file.path(Sys.getenv("HOME"), "Library", "Caches", "spark")
     } else {
       path <- file.path(
         Sys.getenv("XDG_CACHE_HOME", file.path(Sys.getenv("HOME"), ".cache")), "spark")
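
The Darwin change above is behavior-preserving: file.path() joins its arguments with "/", so both forms produce the same "~/Library/Caches/spark". Passing "Library" and "Caches" separately keeps one path component per argument, consistent with the Windows branch. A quick check in R:

    file.path("~", "Library/Caches", "spark")    # "~/Library/Caches/spark"
    file.path("~", "Library", "Caches", "spark") # identical result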
@@ -293,6 +299,16 @@ sparkCachePath <- function() {
   normalizePath(path, mustWork = FALSE)
 }
 
+# Length of the Spark-cache-specific relative path segments on each platform,
+# e.g. "Apache\Spark\Cache" is 3 on Windows and "spark" is 1 on unix.
+# Must match sparkCachePath() exactly.
+sparkCacheRelPathLength <- function() {
+  if (is_windows()) {
+    3
+  } else {
+    1
+  }
+}
 
 installInstruction <- function(mode) {
   if (mode == "remote") {
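
sparkCacheRelPathLength() records how many trailing segments of sparkCachePath() belong to the Spark cache itself and are therefore safe to prune when empty; shared parents such as %LOCALAPPDATA% or ~/.cache must be left alone. An illustrative breakdown for unix paths (splitting on "/" for simplicity; the cache path shown is only an example):

    cachePath <- sparkCachePath()    # e.g. "/home/me/.cache/spark"
    n <- sparkCacheRelPathLength()   # 1 on unix
    parts <- strsplit(cachePath, "/", fixed = TRUE)[[1]]
    tail(parts, n)                   # "spark" is the only prunable segment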
@@ -310,3 +326,22 @@ installInstruction <- function(mode) {
     stop(paste0("No instruction found for ", mode, " mode."))
   }
 }
+
+uninstallDownloadedSpark <- function() {
+  # clean up if Spark was downloaded
+  sparkDownloaded <- getOne(".sparkDownloaded",
+                            envir = .sparkREnv,
+                            inherits = TRUE,
+                            ifnotfound = FALSE)
+  sparkDownloadedDir <- Sys.getenv("SPARK_HOME")
+  if (sparkDownloaded && nchar(sparkDownloadedDir) > 0) {
+    unlink(sparkDownloadedDir, recursive = TRUE, force = TRUE)
+
+    dirs <- traverseParentDirs(sparkCachePath(), sparkCacheRelPathLength())
+    lapply(dirs, function(d) {
+      if (length(list.files(d, all.files = TRUE, include.dirs = TRUE, no.. = TRUE)) == 0) {
+        unlink(d, recursive = TRUE, force = TRUE)
+      }
+    })
+  }
+}
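
uninstallDownloadedSpark() relies on two helpers not shown in this diff: getOne(), which reads a variable and falls back to a default when it is unset, and traverseParentDirs(), which is expected to return the cache path plus enough of its parents that empty cache directories can be pruned. A rough sketch of the assumed behavior (a hypothetical stand-in, not the actual SparkR helper):

    # assumed: returns depth directories, the path itself first, then its parents
    traverseParentDirsSketch <- function(path, depth) {
      dirs <- path
      while (depth > 1) {
        path <- dirname(path)
        depth <- depth - 1
        dirs <- c(dirs, path)
      }
      dirs
    }
    traverseParentDirsSketch("C:/Users/me/AppData/Local/Apache/Spark/Cache", 3)
    # "C:/Users/me/AppData/Local/Apache/Spark/Cache"
    # "C:/Users/me/AppData/Local/Apache/Spark"
    # "C:/Users/me/AppData/Local/Apache"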