Skip to content

Commit 64756de

Browse files
committed
concise message, improve doc for windows, fix regex match
1 parent f37a07c commit 64756de

File tree

4 files changed

+12
-14
lines changed

4 files changed

+12
-14
lines changed

R/check-cran.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,6 @@ $FWDIR/create-docs.sh
4747

4848
VERSION=`grep Version $FWDIR/pkg/DESCRIPTION | awk '{print $NF}'`
4949

50-
"$R_SCRIPT_PATH/"R CMD check --as-cran --no-tests SparkR_"$VERSION".tar.gz
50+
"$R_SCRIPT_PATH/"R CMD check --as-cran SparkR_"$VERSION".tar.gz
5151

5252
popd > /dev/null

R/pkg/R/install.R

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636
#' \code{without-hadoop}.
3737
#'
3838
#' @param hadoopVersion Version of Hadoop to install. Default is \code{"2.7"}. It can take other
39-
#' version number in the format of "int.int".
39+
#' version number in the format of "x.y" where x and y are integers.
4040
#' If \code{hadoopVersion = "without"}, "Hadoop free" build is installed.
4141
#' See
4242
#' \href{http://spark.apache.org/docs/latest/hadoop-provided.html}{
@@ -50,11 +50,9 @@
5050
#' \itemize{
5151
#' \item Mac OS X: \file{~/Library/Caches/spark}
5252
#' \item Unix: \env{$XDG_CACHE_HOME} if defined, otherwise \file{~/.cache/spark}
53-
#' \item Win XP:
54-
#' \file{C:\\Documents and Settings\\<username>\\Local Settings\\Application
55-
#' Data\\spark\\spark\\Cache}
56-
#' \item Win Vista:
57-
#' \file{C:\\Users\\<username>\\AppData\\Local\\spark\\spark\\Cache}
53+
#' \item Windows: \file{\%LOCALAPPDATA\%\\spark\\spark\\Cache}. See
54+
#' \href{https://www.microsoft.com/security/portal/mmpc/shared/variables.aspx}{
55+
#' Windows Common Folder Variables} about \%LOCALAPPDATA\%
5856
#' }
5957
#' @param overwrite If \code{TRUE}, download and overwrite the existing tar file in localDir
6058
#' and force re-install Spark (in case the local directory or file is corrupted)
@@ -210,7 +208,7 @@ hadoop_version_name <- function(hadoopVersion) {
210208
spark_cache_path <- function() {
211209
if (.Platform$OS.type == "windows") {
212210
winAppPath <- Sys.getenv("%LOCALAPPDATA%", unset = NA)
213-
if (is.null(winAppPath)) {
211+
if (is.na(winAppPath)) {
214212
msg <- paste("%LOCALAPPDATA% not found.",
215213
"Please define the environment variable",
216214
"or restart and enter an installation path in localDir.")

R/pkg/R/sparkR.R

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -365,19 +365,19 @@ sparkR.session <- function(
365365
}
366366
overrideEnvs(sparkConfigMap, paramMap)
367367
}
368+
# do not download if it is run in the sparkR shell
368369
if (!grepl(".*shell\\.R$", Sys.getenv("R_PROFILE_USER"), perl = TRUE)) {
369370
if (!nzchar(master) || is_master_local(master)) {
370371
if (is.na(file.info(sparkHome)$isdir)) {
371-
fmt <- paste0("Spark not found in SPARK_HOME: %s.\n",
372-
"To search in the cache directory. ",
372+
msg <- paste0("Spark not found in SPARK_HOME: ",
373+
sparkHome,
374+
" .\nTo search in the cache directory. ",
373375
"Installation will start if not found.")
374-
msg <- sprintf(fmt, sparkHome)
375376
message(msg)
376377
packageLocalDir <- install.spark()
377378
sparkHome <- packageLocalDir
378379
} else {
379-
fmt <- "Spark package is found in SPARK_HOME: %s"
380-
msg <- sprintf(fmt, sparkHome)
380+
msg <- paste0("Spark package is found in SPARK_HOME: ", sparkHome)
381381
message(msg)
382382
}
383383
}

R/pkg/R/utils.R

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -691,5 +691,5 @@ getSparkContext <- function() {
691691
}
692692

693693
is_master_local <- function(master) {
694-
grepl("^local(\\[[0-9\\*]+\\])?$", master, perl = TRUE)
694+
grepl("^local(\\[([0-9]+|\\*)\\])?$", master, perl = TRUE)
695695
}

0 commit comments

Comments (0)