From 05e8819066b12961c6362ecfd70626470b6fe527 Mon Sep 17 00:00:00 2001
From: zero323
Date: Sun, 26 Jan 2020 06:21:17 +0100
Subject: [PATCH 1/4] Switch to testthat >= 2.0.0

---
 R/pkg/tests/fulltests/test_context.R        |  4 ++++
 R/pkg/tests/fulltests/test_includePackage.R |  2 ++
 R/pkg/tests/fulltests/test_sparkSQL.R       |  1 +
 R/pkg/tests/fulltests/test_textFile.R       |  1 +
 R/pkg/tests/run-all.R                       | 23 +++++++++++++++------
 appveyor.yml                                |  5 +----
 docs/README.md                              |  7 +++----
 docs/building-spark.md                      | 11 +++++-----
 8 files changed, 34 insertions(+), 20 deletions(-)

diff --git a/R/pkg/tests/fulltests/test_context.R b/R/pkg/tests/fulltests/test_context.R
index eb8d2a700e1e..b9139154bc16 100644
--- a/R/pkg/tests/fulltests/test_context.R
+++ b/R/pkg/tests/fulltests/test_context.R
@@ -84,6 +84,7 @@ test_that("rdd GC across sparkR.stop", {
   countRDD(rdd3)
   countRDD(rdd4)
   sparkR.session.stop()
+  expect_true(TRUE)
 })
 
 test_that("job group functions can be called", {
@@ -93,6 +94,7 @@ test_that("job group functions can be called", {
   clearJobGroup()
 
   sparkR.session.stop()
+  expect_true(TRUE)
 })
 
 test_that("job description and local properties can be set and got", {
@@ -131,6 +133,7 @@ test_that("utility function can be called", {
   sparkR.sparkContext(master = sparkRTestMaster)
   setLogLevel("ERROR")
   sparkR.session.stop()
+  expect_true(TRUE)
 })
 
 test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whitelist", {
@@ -234,4 +237,5 @@ test_that("SPARK-25234: parallelize should not have integer overflow", {
   # 47000 * 47000 exceeds integer range
   parallelize(sc, 1:47000, 47000)
   sparkR.session.stop()
+  expect_true(TRUE)
 })

diff --git a/R/pkg/tests/fulltests/test_includePackage.R b/R/pkg/tests/fulltests/test_includePackage.R
index f4ea0d1b5cb2..916361ff4c79 100644
--- a/R/pkg/tests/fulltests/test_includePackage.R
+++ b/R/pkg/tests/fulltests/test_includePackage.R
@@ -39,6 +39,7 @@ test_that("include inside function", {
     data <- lapplyPartition(rdd, generateData)
     actual <- collectRDD(data)
   }
+  expect_true(TRUE)
 })
 
 test_that("use include package", {
@@ -55,6 +56,7 @@ test_that("use include package", {
     data <- lapplyPartition(rdd, generateData)
     actual <- collectRDD(data)
   }
+  expect_true(TRUE)
 })
 
 sparkR.session.stop()

diff --git a/R/pkg/tests/fulltests/test_sparkSQL.R b/R/pkg/tests/fulltests/test_sparkSQL.R
index 4fcc2baa0546..175d5212cbbf 100644
--- a/R/pkg/tests/fulltests/test_sparkSQL.R
+++ b/R/pkg/tests/fulltests/test_sparkSQL.R
@@ -1382,6 +1382,7 @@ test_that("column operators", {
   c5 <- c2 ^ c3 ^ c4
   c6 <- c2 %<=>% c3
   c7 <- !c6
+  expect_true(TRUE)
 })
 
 test_that("column functions", {

diff --git a/R/pkg/tests/fulltests/test_textFile.R b/R/pkg/tests/fulltests/test_textFile.R
index be2d2711ff88..046018c7c2a2 100644
--- a/R/pkg/tests/fulltests/test_textFile.R
+++ b/R/pkg/tests/fulltests/test_textFile.R
@@ -75,6 +75,7 @@ test_that("several transformations on RDD created by textFile()", {
   collectRDD(rdd)
 
   unlink(fileName)
+  expect_true(TRUE)
 })
 
 test_that("textFile() followed by a saveAsTextFile() returns the same content", {
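Note: the expect_true(TRUE) lines added above target a behavior change in
testthat: since 2.0.0, a test_that() block that produces no expectations is
reported as an empty test (a skip) rather than a pass, and these SparkR tests
only verify that the wrapped code runs without error. The no-op expectation
keeps such smoke tests counted as passing. A minimal sketch of the pattern,
assuming only that testthat >= 2.0.0 is installed (the test body is a
stand-in for any side-effect-only code):

    library(testthat)

    test_that("smoke test with side effects only", {
      tmp <- tempfile()
      writeLines("exercised purely for side effects", tmp)
      unlink(tmp)
      expect_true(TRUE)  # explicit expectation, so the test is not flagged as empty
    })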
diff --git a/R/pkg/tests/run-all.R b/R/pkg/tests/run-all.R
index 1e9641855888..2337880c3d40 100644
--- a/R/pkg/tests/run-all.R
+++ b/R/pkg/tests/run-all.R
@@ -20,7 +20,6 @@
 library(SparkR)
 
 # SPARK-25572
 if (identical(Sys.getenv("NOT_CRAN"), "true")) {
-
   # Turn all warnings into errors
   options("warn" = 2)
@@ -60,11 +59,23 @@ if (identical(Sys.getenv("NOT_CRAN"), "true")) {
 
   # set random seed for predictable results. mostly for base's sample() in tree and classification
   set.seed(42)
-  # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
-  testthat:::run_tests("SparkR",
-                       file.path(sparkRDir, "pkg", "tests", "fulltests"),
-                       NULL,
-                       "summary")
+
+  # To be removed once testthat 1.x is removed from all builds
+  if (grepl("^1\\..*", installed.packages()["testthat", "Version"])) {
+    # testthat 1.x
+    test_runner <- testthat:::run_tests
+    reporter <- "summary"
+
+  } else {
+    # testthat >= 2.0.0
+    test_runner <- testthat:::test_package_dir
+    reporter <- testthat::default_reporter()
+  }
+
+  test_runner("SparkR",
+          file.path(sparkRDir, "pkg", "tests", "fulltests"),
+          NULL,
+          reporter)
 }
 
 SparkR:::uninstallDownloadedSpark()

diff --git a/appveyor.yml b/appveyor.yml
index 00c688ba18eb..bea20313b649 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -43,11 +43,8 @@ install:
   - ps: .\dev\appveyor-install-dependencies.ps1
   # Required package for R unit tests
   - cmd: R -e "install.packages(c('knitr', 'rmarkdown', 'e1071', 'survival', 'arrow'), repos='https://cloud.r-project.org/')"
-  # Here, we use the fixed version of testthat. For more details, please see SPARK-22817.
-  # As of devtools 2.1.0, it requires testthat higher then 2.1.1 as a dependency. SparkR test requires testthat 1.0.2.
-  # Therefore, we don't use devtools but installs it directly from the archive including its dependencies.
   - cmd: R -e "install.packages(c('crayon', 'praise', 'R6'), repos='https://cloud.r-project.org/')"
-  - cmd: R -e "install.packages('https://cloud.r-project.org/src/contrib/Archive/testthat/testthat_1.0.2.tar.gz', repos=NULL, type='source')"
+  - cmd: R -e "install.packages('testthat', repos='https://cloud.r-project.org/')"
   - cmd: R -e "packageVersion('knitr'); packageVersion('rmarkdown'); packageVersion('testthat'); packageVersion('e1071'); packageVersion('survival'); packageVersion('arrow')"
 
 build_script:

diff --git a/docs/README.md b/docs/README.md
index ef849d53daf7..2001de6207d1 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -6,9 +6,9 @@ license: |
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at
-  
+
      http://www.apache.org/licenses/LICENSE-2.0
-  
+
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -39,9 +39,8 @@ installed. Also install the following libraries:
 $ sudo gem install jekyll jekyll-redirect-from rouge
 # Following is needed only for generating API docs
 $ sudo pip install sphinx pypandoc mkdocs
-$ sudo Rscript -e 'install.packages(c("knitr", "devtools", "rmarkdown"), repos="https://cloud.r-project.org/")'
+$ sudo Rscript -e 'install.packages(c("knitr", "devtools", "testthat", "rmarkdown"), repos="https://cloud.r-project.org/")'
 $ sudo Rscript -e 'devtools::install_version("roxygen2", version = "5.0.1", repos="https://cloud.r-project.org/")'
-$ sudo Rscript -e 'devtools::install_version("testthat", version = "1.0.2", repos="https://cloud.r-project.org/")'
 ```
 
 Note: If you are on a system with both Ruby 1.9 and Ruby 2.0 you may need to replace gem with gem2.0.
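Note: the run-all.R hunk above is the core of the change: instead of calling a
single hard-coded runner, it now picks both the runner function and the
reporter at run time from the installed testthat version. Functions are
first-class values in R, so the chosen internal entry point can be bound to a
name and invoked later. A standalone sketch of the same dispatch, with a
hypothetical pick_runner() helper written only for illustration (it assumes
testthat is installed and that the internal functions named in the patch exist
in their respective major versions):

    # Hypothetical helper mirroring the run-time dispatch in run-all.R.
    pick_runner <- function() {
      if (packageVersion("testthat") < "2.0.0") {
        # testthat 1.x: internal runner, reporter named by a string
        list(runner = testthat:::run_tests, reporter = "summary")
      } else {
        # testthat >= 2.0.0: different internal entry point, reporter object
        list(runner = testthat:::test_package_dir,
             reporter = testthat::default_reporter())
      }
    }

    cfg <- pick_runner()
    # cfg$runner("SparkR", "path/to/fulltests", NULL, cfg$reporter)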
diff --git a/docs/building-spark.md b/docs/building-spark.md
index 580f98208673..77ab7900dc4a 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -9,9 +9,9 @@ license: |
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at
-  
+
      http://www.apache.org/licenses/LICENSE-2.0
-  
+
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -70,9 +70,9 @@ This will build Spark distribution along with Python pip and R packages. For mor
 
 ## Specifying the Hadoop Version and Enabling YARN
 
-You can specify the exact version of Hadoop to compile against through the `hadoop.version` property. 
+You can specify the exact version of Hadoop to compile against through the `hadoop.version` property.
 
-You can enable the `yarn` profile and optionally set the `yarn.version` property if it is different 
+You can enable the `yarn` profile and optionally set the `yarn.version` property if it is different
 from `hadoop.version`.
 
 Example:
@@ -238,8 +238,7 @@ The run-tests script also can be limited to a specific Python version or a speci
 To run the SparkR tests you will need to install the [knitr](https://cran.r-project.org/package=knitr), [rmarkdown](https://cran.r-project.org/package=rmarkdown), [testthat](https://cran.r-project.org/package=testthat), [e1071](https://cran.r-project.org/package=e1071) and [survival](https://cran.r-project.org/package=survival) packages first:
 
-    Rscript -e "install.packages(c('knitr', 'rmarkdown', 'devtools', 'e1071', 'survival'), repos='https://cloud.r-project.org/')"
-    Rscript -e "devtools::install_version('testthat', version = '1.0.2', repos='https://cloud.r-project.org/')"
+    Rscript -e "install.packages(c('knitr', 'rmarkdown', 'devtools', 'testthat', 'e1071', 'survival'), repos='https://cloud.r-project.org/')"
 
 You can run just the SparkR tests using the command:

From 69544ae1a0bd72112f65354dc772a61dbcff0fdf Mon Sep 17 00:00:00 2001
From: zero323
Date: Sun, 26 Jan 2020 07:03:25 +0100
Subject: [PATCH 2/4] Apply style suggestions

---
 R/pkg/tests/run-all.R | 6 +++---
 appveyor.yml          | 3 +--
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/R/pkg/tests/run-all.R b/R/pkg/tests/run-all.R
index 2337880c3d40..47df7885d1a2 100644
--- a/R/pkg/tests/run-all.R
+++ b/R/pkg/tests/run-all.R
@@ -73,9 +73,9 @@ if (identical(Sys.getenv("NOT_CRAN"), "true")) {
   }
 
   test_runner("SparkR",
-          file.path(sparkRDir, "pkg", "tests", "fulltests"),
-          NULL,
-          reporter)
+              file.path(sparkRDir, "pkg", "tests", "fulltests"),
+              NULL,
+              reporter)
 }
 
 SparkR:::uninstallDownloadedSpark()

diff --git a/appveyor.yml b/appveyor.yml
index bea20313b649..5d98260265b1 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -43,8 +43,7 @@ install:
   - ps: .\dev\appveyor-install-dependencies.ps1
   # Required package for R unit tests
   - cmd: R -e "install.packages(c('knitr', 'rmarkdown', 'e1071', 'survival', 'arrow'), repos='https://cloud.r-project.org/')"
-  - cmd: R -e "install.packages(c('crayon', 'praise', 'R6'), repos='https://cloud.r-project.org/')"
-  - cmd: R -e "install.packages('testthat', repos='https://cloud.r-project.org/')"
+  - cmd: R -e "install.packages(c('crayon', 'praise', 'R6', 'testthat'), repos='https://cloud.r-project.org/')"
   - cmd: R -e "packageVersion('knitr'); packageVersion('rmarkdown'); packageVersion('testthat'); packageVersion('e1071'); packageVersion('survival'); packageVersion('arrow')"
 
 build_script:

From c7ed64af9e697b3619779857dd820832176b3be3 Mon Sep 17 00:00:00 2001
From: zero323
Date: Sun, 26 Jan 2020 14:59:30 +0100
Subject: [PATCH 3/4] Replace installed.packages with packageVersion

---
 R/pkg/tests/run-all.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/R/pkg/tests/run-all.R b/R/pkg/tests/run-all.R
index 47df7885d1a2..ba85e14f2de2 100644
--- a/R/pkg/tests/run-all.R
+++ b/R/pkg/tests/run-all.R
@@ -61,7 +61,7 @@ if (identical(Sys.getenv("NOT_CRAN"), "true")) {
   set.seed(42)
 
   # To be removed once testthat 1.x is removed from all builds
-  if (grepl("^1\\..*", installed.packages()["testthat", "Version"])) {
+  if (grepl("^1\\..*", packageVersion("testthat"))) {
     # testthat 1.x
     test_runner <- testthat:::run_tests
     reporter <- "summary"

From 56b1ba966467f5677572d70fa5c4d75bc0c66789 Mon Sep 17 00:00:00 2001
From: zero323
Date: Tue, 28 Jan 2020 15:31:45 +0100
Subject: [PATCH 4/4] Add JIRA ticket to TODO removal comment

Signed-off-by: zero323

---
 R/pkg/tests/run-all.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/R/pkg/tests/run-all.R b/R/pkg/tests/run-all.R
index ba85e14f2de2..bf02ecdad66f 100644
--- a/R/pkg/tests/run-all.R
+++ b/R/pkg/tests/run-all.R
@@ -60,7 +60,7 @@ if (identical(Sys.getenv("NOT_CRAN"), "true")) {
   # set random seed for predictable results. mostly for base's sample() in tree and classification
   set.seed(42)
 
-  # To be removed once testthat 1.x is removed from all builds
+  # TODO (SPARK-30663) To be removed once testthat 1.x is removed from all builds
   if (grepl("^1\\..*", packageVersion("testthat"))) {
     # testthat 1.x
     test_runner <- testthat:::run_tests
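Note: the one-line change in PATCH 3 is worth calling out. installed.packages()
builds a matrix describing every package in every library on the search path
just to read a single version field, while packageVersion() reads only the
metadata of the named package and returns a 'package_version' object. grepl()
still works on that object because it is coerced to character, and version
objects also support direct comparison. A small sketch, runnable in any R
session with testthat installed (the explicit comparison is an equivalent
alternative shown for illustration, not what the patch itself uses):

    v <- packageVersion("testthat")  # 'package_version' object, e.g. 2.3.1

    grepl("^1\\..*", v)  # the patch's check: coerced to character, matches major version 1
    v < "2.0.0"          # equivalent check via direct version comparison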