Skip to content

Commit d84ba06

Browse files
committed
Specify path in jsonUrl and add alias to install function doc
1 parent 5decac6 commit d84ba06

File tree

3 files changed: +15 additions, −6 deletions

R/pkg/R/install.R

Lines changed: 9 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -19,7 +19,7 @@
1919
# from CRAN.
2020

2121
#' Download and Install Apache Spark to a Local Directory
22-
#'
22+
#'
2323
#' \code{install.spark} downloads and installs Spark to a local directory if
2424
#' it is not found. The Spark version we use is the same as the SparkR version.
2525
#' Users can specify a desired Hadoop version, the remote mirror site, and
@@ -59,6 +59,7 @@
5959
#' @return \code{install.spark} returns the local directory where Spark is found or installed
6060
#' @rdname install.spark
6161
#' @name install.spark
62+
#' @aliases install.spark
6263
#' @export
6364
#' @examples
6465
#'\dontrun{
@@ -131,7 +132,7 @@ robust_download_tar <- function(mirrorUrl, version, hadoopVersion, packageName,
131132

132133
# step 2: use url suggested from apache website
133134
message("Looking for site suggested from apache website...")
134-
mirrorUrl <- get_preferred_mirror()
135+
mirrorUrl <- get_preferred_mirror(version, packageName)
135136
if (!is.null(mirrorUrl)) {
136137
success <- direct_download_tar(mirrorUrl, version, hadoopVersion,
137138
packageName, packageLocalPath)
@@ -156,8 +157,11 @@ robust_download_tar <- function(mirrorUrl, version, hadoopVersion, packageName,
156157
}
157158
}
158159

159-
get_preferred_mirror <- function() {
160-
jsonUrl <- "http://www.apache.org/dyn/closer.cgi?as_json=1"
160+
get_preferred_mirror <- function(version, packageName) {
161+
jsonUrl <- paste0("http://www.apache.org/dyn/closer.cgi?path=",
162+
file.path("spark", version, packageName),
163+
".tgz&as_json=1")
164+
# jsonUrl <- "http://www.apache.org/dyn/closer.cgi?as_json=1"
161165
textLines <- readLines(jsonUrl, warn = FALSE)
162166
rowNum <- grep("\"preferred\"", textLines)
163167
linePreferred <- textLines[rowNum]
@@ -185,6 +189,7 @@ direct_download_tar <- function(mirrorUrl, version, hadoopVersion, packageName,
185189
isFail <- tryCatch(download.file(packageRemotePath, packageLocalPath),
186190
error = function(e) {
187191
message(sprintf("Fetch failed from %s", mirrorUrl))
192+
print(e)
188193
TRUE
189194
})
190195
!isFail

R/pkg/R/sparkR.R

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -366,8 +366,8 @@ sparkR.session <- function(
366366
overrideEnvs(sparkConfigMap, paramMap)
367367
}
368368
# do not download if it is run in the sparkR shell
369-
if (!grepl(".*shell\\.R$", Sys.getenv("R_PROFILE_USER"), perl = TRUE)) {
370-
if (!nzchar(master) || is_master_local(master)) {
369+
if (!nzchar(master) || is_master_local(master)) {
370+
if (!is_sparkR_shell()) {
371371
if (is.na(file.info(sparkHome)$isdir)) {
372372
msg <- paste0("Spark not found in SPARK_HOME: ",
373373
sparkHome,

R/pkg/R/utils.R

Lines changed: 4 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -693,3 +693,7 @@ getSparkContext <- function() {
693693
is_master_local <- function(master) {
694694
grepl("^local(\\[([0-9]+|\\*)\\])?$", master, perl = TRUE)
695695
}
696+
697+
is_sparkR_shell <- function() {
698+
grepl(".*shell\\.R$", Sys.getenv("R_PROFILE_USER"), perl = TRUE)
699+
}

Comments (0)