Commit 93c743f

[SPARK-17577][FOLLOW-UP][SPARKR] SparkR spark.addFile supports adding directory recursively

## What changes were proposed in this pull request?

#15140 exposed ```JavaSparkContext.addFile(path: String, recursive: Boolean)``` to Python/R, so we can update SparkR ```spark.addFile``` to support adding a directory recursively.

## How was this patch tested?

Added unit test.

Author: Yanbo Liang <[email protected]>

Closes #15216 from yanboliang/spark-17577-2.
1 parent 00be16d
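
For orientation, here is a minimal usage sketch of the API this commit enables (a hedged sketch: the paths are hypothetical placeholders, and an active SparkR session is assumed):

```r
library(SparkR)
sparkR.session()

# Existing behavior: distribute a single file to every node.
spark.addFile("/tmp/myfile.txt")

# New in this patch: distribute a whole directory tree.
spark.addFile("/tmp/my_dir", recursive = TRUE)

# Downloaded files are addressed relative to the added directory's name.
spark.getSparkFiles("my_dir/some_file.txt")

sparkR.session.stop()
```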

2 files changed, 29 insertions(+), 2 deletions(-)

R/pkg/R/context.R (7 additions, 2 deletions)
```diff
@@ -231,17 +231,22 @@ setCheckpointDir <- function(sc, dirName) {
 #' filesystems), or an HTTP, HTTPS or FTP URI. To access the file in Spark jobs,
 #' use spark.getSparkFiles(fileName) to find its download location.
 #'
+#' A directory can be given if the recursive option is set to true.
+#' Currently directories are only supported for Hadoop-supported filesystems.
+#' Refer Hadoop-supported filesystems at \url{https://wiki.apache.org/hadoop/HCFS}.
+#'
 #' @rdname spark.addFile
 #' @param path The path of the file to be added
+#' @param recursive Whether to add files recursively from the path. Default is FALSE.
 #' @export
 #' @examples
 #'\dontrun{
 #' spark.addFile("~/myfile")
 #'}
 #' @note spark.addFile since 2.1.0
-spark.addFile <- function(path) {
+spark.addFile <- function(path, recursive = FALSE) {
   sc <- getSparkContext()
-  invisible(callJMethod(sc, "addFile", suppressWarnings(normalizePath(path))))
+  invisible(callJMethod(sc, "addFile", suppressWarnings(normalizePath(path)), recursive))
 }
 
 #' Get the root directory that contains files added through spark.addFile.
```
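
One behavioral detail worth noting: with the default `recursive = FALSE`, the JVM-side `addFile` rejects directories, so callers adding a directory must opt in explicitly. A hedged sketch of that contract (the directory name is a placeholder, and the exact error comes from Spark core, not this patch):

```r
library(SparkR)
sparkR.session()

conf_dir <- file.path(tempdir(), "conf_dir")
dir.create(conf_dir)
writeLines("key=value", file.path(conf_dir, "app.conf"))

# Adding a directory with the default recursive = FALSE is expected
# to raise an error from the JVM backend, so guard the call.
added <- tryCatch({
  spark.addFile(conf_dir)
  TRUE
}, error = function(e) FALSE)

if (!added) {
  # Opting in to recursion makes directory adds succeed.
  spark.addFile(conf_dir, recursive = TRUE)
}
```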

R/pkg/inst/tests/testthat/test_context.R (22 additions, 0 deletions)
```diff
@@ -169,6 +169,7 @@ test_that("spark.lapply should perform simple transforms", {
 
 test_that("add and get file to be downloaded with Spark job on every node", {
   sparkR.sparkContext()
+  # Test add file.
   path <- tempfile(pattern = "hello", fileext = ".txt")
   filename <- basename(path)
   words <- "Hello World!"
@@ -177,5 +178,26 @@ test_that("add and get file to be downloaded with Spark job on every node", {
   download_path <- spark.getSparkFiles(filename)
   expect_equal(readLines(download_path), words)
   unlink(path)
+
+  # Test add directory recursively.
+  path <- paste0(tempdir(), "/", "recursive_dir")
+  dir.create(path)
+  dir_name <- basename(path)
+  path1 <- paste0(path, "/", "hello.txt")
+  file.create(path1)
+  sub_path <- paste0(path, "/", "sub_hello")
+  dir.create(sub_path)
+  path2 <- paste0(sub_path, "/", "sub_hello.txt")
+  file.create(path2)
+  words <- "Hello World!"
+  sub_words <- "Sub Hello World!"
+  writeLines(words, path1)
+  writeLines(sub_words, path2)
+  spark.addFile(path, recursive = TRUE)
+  download_path1 <- spark.getSparkFiles(paste0(dir_name, "/", "hello.txt"))
+  expect_equal(readLines(download_path1), words)
+  download_path2 <- spark.getSparkFiles(paste0(dir_name, "/", "sub_hello/sub_hello.txt"))
+  expect_equal(readLines(download_path2), sub_words)
+  unlink(path, recursive = TRUE)
   sparkR.session.stop()
 })
```
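
The test checks download locations file by file on the driver; the trailing context line of the context.R diff points at `spark.getSparkFilesRootDirectory` as the way to find where added files land. A small follow-up sketch under that assumption, reusing the test's directory layout:

```r
# After spark.addFile(path, recursive = TRUE) as in the test above,
# inspect everything distributed under the SparkFiles root directory.
root <- spark.getSparkFilesRootDirectory()
list.files(file.path(root, "recursive_dir"), recursive = TRUE)
# Expected: "hello.txt" and "sub_hello/sub_hello.txt"
```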
