@@ -89,8 +89,8 @@ install.spark <- function(hadoopVersion = "2.7", mirrorUrl = NULL,
   }
 
   if (overwrite) {
-    message(paste0("Overwrite = TRUE: download and overwrite the tar file",
-                   " and Spark package directory if they exist."))
+    message("Overwrite = TRUE: download and overwrite the tar file",
+            " and Spark package directory if they exist.")
   }
 
   releaseUrl <- Sys.getenv("SPARKR_RELEASE_DOWNLOAD_URL")
@@ -103,12 +103,11 @@ install.spark <- function(hadoopVersion = "2.7", mirrorUrl = NULL,
   # can use dir.exists(packageLocalDir) under R 3.2.0 or later
   if (!is.na(file.info(packageLocalDir)$isdir) && !overwrite) {
     if (releaseUrl != "") {
-      message(paste(packageName, "found, setting SPARK_HOME to", packageLocalDir))
+      message(packageName, " found, setting SPARK_HOME to ", packageLocalDir)
     } else {
-      fmt <- "%s for Hadoop %s found, setting SPARK_HOME to %s"
-      msg <- sprintf(fmt, version, ifelse(hadoopVersion == "without", "Free build", hadoopVersion),
-                     packageLocalDir)
-      message(msg)
+      message(version, " for Hadoop ",
+              if (hadoopVersion == "without") "Free build" else hadoopVersion,
+              " found, setting SPARK_HOME to ", packageLocalDir)
     }
     Sys.setenv(SPARK_HOME = packageLocalDir)
     return(invisible(packageLocalDir))
@@ -127,26 +126,23 @@ install.spark <- function(hadoopVersion = "2.7", mirrorUrl = NULL,
       success <- downloadUrl(releaseUrl, packageLocalPath)
       if (!success) {
         unlink(packageLocalPath)
-        stop(paste0("Fetch failed from ", releaseUrl))
+        stop("Fetch failed from ", releaseUrl)
       }
     } else {
       robustDownloadTar(mirrorUrl, version, hadoopVersion, packageName, packageLocalPath)
     }
   }
 
-  message(sprintf("Installing to %s", localDir))
+  message("Installing to ", localDir)
   # There are two ways untar can fail - untar could stop() on errors like incomplete block on file
   # or, tar command can return failure code
   success <- tryCatch(untar(tarfile = packageLocalPath, exdir = localDir) == 0,
                       error = function(e) {
-                        message(e)
-                        message()
+                        message(e, "\n")
                         FALSE
                       },
                       warning = function(w) {
-                        # Treat warning as error, add an empty line with message()
-                        message(w)
-                        message()
+                        message(w, "\n")
                         FALSE
                       })
   if (!tarExists || overwrite || !success) {
@@ -160,7 +156,7 @@ install.spark <- function(hadoopVersion = "2.7", mirrorUrl = NULL,
   if (!success) stop("Extract archive failed.")
   message("DONE.")
   Sys.setenv(SPARK_HOME = packageLocalDir)
-  message(paste("SPARK_HOME set to", packageLocalDir))
+  message("SPARK_HOME set to ", packageLocalDir)
   invisible(packageLocalDir)
 }
 
@@ -173,7 +169,7 @@ robustDownloadTar <- function(mirrorUrl, version, hadoopVersion, packageName, pa
     if (success) {
       return()
     } else {
-      message(paste0("Unable to download from mirrorUrl: ", mirrorUrl))
+      message("Unable to download from mirrorUrl: ", mirrorUrl)
     }
   } else {
     message("MirrorUrl not provided.")
@@ -201,11 +197,9 @@ robustDownloadTar <- function(mirrorUrl, version, hadoopVersion, packageName, pa
     # remove any partially downloaded file
     unlink(packageLocalPath)
     message("Unable to download from default mirror site: ", mirrorUrl)
-    msg <- sprintf(paste("Unable to download Spark %s for Hadoop %s.",
-                         "Please check network connection, Hadoop version,",
-                         "or provide other mirror sites."),
-                   version, ifelse(hadoopVersion == "without", "Free build", hadoopVersion))
-    stop(msg)
+    stop("Unable to download Spark ", version,
+         " for Hadoop ", if (hadoopVersion == "without") "Free build" else hadoopVersion,
+         ". Please check network connection, Hadoop version, or provide other mirror sites.")
   }
 }
 
@@ -222,7 +216,7 @@ getPreferredMirror <- function(version, packageName) {
     endPos <- matchInfo + attr(matchInfo, "match.length") - 2
     mirrorPreferred <- base::substr(linePreferred, startPos, endPos)
     mirrorPreferred <- paste0(mirrorPreferred, "spark")
-    message(sprintf("Preferred mirror site found: %s", mirrorPreferred))
+    message("Preferred mirror site found: ", mirrorPreferred)
   } else {
     mirrorPreferred <- NULL
   }
@@ -231,24 +225,20 @@ getPreferredMirror <- function(version, packageName) {
 
 directDownloadTar <- function(mirrorUrl, version, hadoopVersion, packageName, packageLocalPath) {
   packageRemotePath <- paste0(file.path(mirrorUrl, version, packageName), ".tgz")
-  fmt <- "Downloading %s for Hadoop %s from:\n- %s"
-  msg <- sprintf(fmt, version, ifelse(hadoopVersion == "without", "Free build", hadoopVersion),
-                 packageRemotePath)
-  message(msg)
+  message("Downloading ", version, " for Hadoop ",
+          if (hadoopVersion == "without") "Free build" else hadoopVersion,
+          " from:\n- ", packageRemotePath)
   downloadUrl(packageRemotePath, packageLocalPath)
 }
 
 downloadUrl <- function(remotePath, localPath) {
   isFail <- tryCatch(download.file(remotePath, localPath),
                      error = function(e) {
-                       message(e)
-                       message()
+                       message(e, "\n")
                        TRUE
                      },
                      warning = function(w) {
-                       # Treat warning as error, add an empty line with message()
-                       message(w)
-                       message()
+                       message(w, "\n")
                        TRUE
                      })
   !isFail
@@ -279,9 +269,9 @@ sparkCachePath <- function() {
       winAppPath <- Sys.getenv("USERPROFILE", unset = NA)
     }
     if (is.na(winAppPath)) {
-      stop(paste("%LOCALAPPDATA% and %USERPROFILE% not found.",
-                 "Please define the environment variable",
-                 "or restart and enter an installation path in localDir."))
+      stop("%LOCALAPPDATA% and %USERPROFILE% not found. ",
+           "Please define the environment variable ",
+           "or restart and enter an installation path in localDir.")
     } else {
       path <- file.path(winAppPath, "Apache", "Spark", "Cache")
     }
@@ -293,7 +283,7 @@ sparkCachePath <- function() {
         Sys.getenv("XDG_CACHE_HOME", file.path(Sys.getenv("HOME"), ".cache")), "spark")
     }
   } else {
-    stop(sprintf("Unknown OS: %s", .Platform$OS.type))
+    stop("Unknown OS: ", .Platform$OS.type)
   }
   normalizePath(path, mustWork = FALSE)
 }
@@ -322,7 +312,7 @@ installInstruction <- function(mode) {
322312 " If you need further help, " ,
323313 " contact the administrators of the cluster." )
324314 } else {
325- stop(paste0( " No instruction found for " , mode , " mode. " ) )
315+ stop(" No instruction found for mode " , mode )
326316 }
327317}
328318
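The refactor above relies on message(), warning(), and stop() concatenating their "..." arguments with no separator before emitting them, so the paste()/paste0()/sprintf() wrappers were redundant; any needed spaces simply move into the string literals. A minimal standalone sketch of the equivalence (the directory and URL values below are illustrative, not taken from the patch):

# message() pastes its arguments together itself, so both calls print the same line.
packageLocalDir <- "/tmp/spark"                        # illustrative value
message(paste("SPARK_HOME set to", packageLocalDir))   # before: paste() supplies the space
message("SPARK_HOME set to ", packageLocalDir)         # after: the space lives in the literal

# The same holds for stop(); both calls signal an error with identical text.
tryCatch(stop(paste0("Fetch failed from ", "http://example.org/spark.tgz")),
         error = function(e) message(conditionMessage(e)))
tryCatch(stop("Fetch failed from ", "http://example.org/spark.tgz"),
         error = function(e) message(conditionMessage(e)))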
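The other recurring substitution replaces ifelse(cond, a, b) with if (cond) a else b. ifelse() is the vectorized form; for a single scalar test such as hadoopVersion == "without", the plain if/else expression returns the same value more directly. A small sketch with a hypothetical hadoopVersion value:

hadoopVersion <- "without"   # illustrative; "without" marks the Hadoop-free build

# Vectorized selection (the form the old code used):
label1 <- ifelse(hadoopVersion == "without", "Free build", hadoopVersion)

# Scalar selection (the form the patch switches to); if/else is itself an expression in R:
label2 <- if (hadoopVersion == "without") "Free build" else hadoopVersion

stopifnot(identical(label1, label2))
message("Downloading Spark for Hadoop ", label2)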