4 changes: 4 additions & 0 deletions R/pkg/tests/fulltests/test_context.R
@@ -84,6 +84,7 @@ test_that("rdd GC across sparkR.stop", {
   countRDD(rdd3)
   countRDD(rdd4)
   sparkR.session.stop()
+  expect_true(TRUE)
 })
 
 test_that("job group functions can be called", {
@@ -93,6 +94,7 @@ test_that("job group functions can be called", {
   clearJobGroup()
 
   sparkR.session.stop()
+  expect_true(TRUE)
 })
 
 test_that("job description and local properties can be set and got", {
@@ -131,6 +133,7 @@ test_that("utility function can be called", {
   sparkR.sparkContext(master = sparkRTestMaster)
   setLogLevel("ERROR")
   sparkR.session.stop()
+  expect_true(TRUE)
 })
 
 test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whitelist", {
@@ -234,4 +237,5 @@ test_that("SPARK-25234: parallelize should not have integer overflow", {
   # 47000 * 47000 exceeds integer range
   parallelize(sc, 1:47000, 47000)
   sparkR.session.stop()
+  expect_true(TRUE)
 })
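Why `expect_true(TRUE)`? testthat 2.x treats a `test_that()` block that makes no assertions as an empty test and reports it as skipped, while these SparkR tests only check that the calls complete without error. A minimal sketch of the pattern, independent of SparkR (assuming only that testthat is installed):

    library(testthat)

    # Reported as an empty test (skipped) under testthat 2.x: nothing is asserted.
    test_that("side effects only", {
      x <- sum(1:10)
    })

    # Reported as a pass: the trivial expectation succeeds whenever the
    # preceding lines ran without error.
    test_that("side effects plus a trivial expectation", {
      x <- sum(1:10)
      expect_true(TRUE)
    })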
2 changes: 2 additions & 0 deletions R/pkg/tests/fulltests/test_includePackage.R
@@ -39,6 +39,7 @@ test_that("include inside function", {
     data <- lapplyPartition(rdd, generateData)
     actual <- collectRDD(data)
   }
+  expect_true(TRUE)
 })
 
 test_that("use include package", {
@@ -55,6 +56,7 @@ test_that("use include package", {
     data <- lapplyPartition(rdd, generateData)
     actual <- collectRDD(data)
   }
+  expect_true(TRUE)
 })
 
 sparkR.session.stop()
1 change: 1 addition & 0 deletions R/pkg/tests/fulltests/test_sparkSQL.R
@@ -1382,6 +1382,7 @@ test_that("column operators", {
   c5 <- c2 ^ c3 ^ c4
   c6 <- c2 %<=>% c3
   c7 <- !c6
+  expect_true(TRUE)
 })
 
 test_that("column functions", {
1 change: 1 addition & 0 deletions R/pkg/tests/fulltests/test_textFile.R
@@ -75,6 +75,7 @@ test_that("several transformations on RDD created by textFile()", {
   collectRDD(rdd)
 
   unlink(fileName)
+  expect_true(TRUE)
 })
 
 test_that("textFile() followed by a saveAsTextFile() returns the same content", {
23 changes: 17 additions & 6 deletions R/pkg/tests/run-all.R
@@ -20,7 +20,6 @@ library(SparkR)
 
 # SPARK-25572
 if (identical(Sys.getenv("NOT_CRAN"), "true")) {
-
   # Turn all warnings into errors
   options("warn" = 2)
 
@@ -60,11 +59,23 @@ if (identical(Sys.getenv("NOT_CRAN"), "true")) {
 if (identical(Sys.getenv("NOT_CRAN"), "true")) {
   # set random seed for predictable results. mostly for base's sample() in tree and classification
   set.seed(42)
-  # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
-  testthat:::run_tests("SparkR",
-                       file.path(sparkRDir, "pkg", "tests", "fulltests"),
-                       NULL,
-                       "summary")
+
+  # TODO (SPARK-30663) To be removed once testthat 1.x is removed from all builds
+  if (grepl("^1\\..*", packageVersion("testthat"))) {
+    # testthat 1.x
+    test_runner <- testthat:::run_tests
+    reporter <- "summary"
+
+  } else {
+    # testthat >= 2.0.0
+    test_runner <- testthat:::test_package_dir
+    reporter <- testthat::default_reporter()
+  }
+
+  test_runner("SparkR",
+              file.path(sparkRDir, "pkg", "tests", "fulltests"),
+              NULL,
+              reporter)
 }
 
 SparkR:::uninstallDownloadedSpark()
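The dispatch above relies on the 1.x runner `testthat:::run_tests` and the 2.x runner `testthat:::test_package_dir` accepting the same positional arguments (package name, test directory, filter, reporter), so a single call site serves both. A hedged sketch of just the version gate: `grepl()` works here because it coerces the `package_version` object to character, and `packageVersion("testthat") < "2.0.0"` would be an equivalent, arguably more idiomatic check:

    # Select the runner and reporter to match the installed testthat version.
    if (grepl("^1\\..*", packageVersion("testthat"))) {
      runner_name <- "run_tests"         # testthat 1.x internal runner
      reporter <- "summary"
    } else {
      runner_name <- "test_package_dir"  # testthat >= 2.0.0 internal runner
      reporter <- testthat::default_reporter()
    }
    test_runner <- get(runner_name, envir = asNamespace("testthat"))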
6 changes: 1 addition & 5 deletions appveyor.yml
@@ -43,11 +43,7 @@ install:
   - ps: .\dev\appveyor-install-dependencies.ps1
   # Required package for R unit tests
   - cmd: R -e "install.packages(c('knitr', 'rmarkdown', 'e1071', 'survival', 'arrow'), repos='https://cloud.r-project.org/')"
-  # Here, we use the fixed version of testthat. For more details, please see SPARK-22817.
-  # As of devtools 2.1.0, it requires testthat higher then 2.1.1 as a dependency. SparkR test requires testthat 1.0.2.
-  # Therefore, we don't use devtools but installs it directly from the archive including its dependencies.
-  - cmd: R -e "install.packages(c('crayon', 'praise', 'R6'), repos='https://cloud.r-project.org/')"
-  - cmd: R -e "install.packages('https://cloud.r-project.org/src/contrib/Archive/testthat/testthat_1.0.2.tar.gz', repos=NULL, type='source')"
+  - cmd: R -e "install.packages(c('crayon', 'praise', 'R6', 'testthat'), repos='https://cloud.r-project.org/')"
   - cmd: R -e "packageVersion('knitr'); packageVersion('rmarkdown'); packageVersion('testthat'); packageVersion('e1071'); packageVersion('survival'); packageVersion('arrow')"
 
 build_script:
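With the archived 1.0.2 pin removed, AppVeyor installs whatever testthat version CRAN currently serves, and the version gate in R/pkg/tests/run-all.R selects the matching runner. A hypothetical sanity check, not part of this change, could confirm after installation that the runner run-all.R will look up actually exists:

    # Hypothetical post-install check (not in this PR): verify the installed
    # testthat exposes the internal runner that run-all.R will select.
    v <- packageVersion("testthat")
    runner <- if (v < "2.0.0") "run_tests" else "test_package_dir"
    stopifnot(exists(runner, envir = asNamespace("testthat"), inherits = FALSE))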
7 changes: 3 additions & 4 deletions docs/README.md
@@ -6,9 +6,9 @@ license: |
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at
-
+
      http://www.apache.org/licenses/LICENSE-2.0
-
+
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -39,9 +39,8 @@ installed. Also install the following libraries:
 $ sudo gem install jekyll jekyll-redirect-from rouge
 # Following is needed only for generating API docs
 $ sudo pip install sphinx pypandoc mkdocs
-$ sudo Rscript -e 'install.packages(c("knitr", "devtools", "rmarkdown"), repos="https://cloud.r-project.org/")'
+$ sudo Rscript -e 'install.packages(c("knitr", "devtools", "testthat", "rmarkdown"), repos="https://cloud.r-project.org/")'
 $ sudo Rscript -e 'devtools::install_version("roxygen2", version = "5.0.1", repos="https://cloud.r-project.org/")'
-$ sudo Rscript -e 'devtools::install_version("testthat", version = "1.0.2", repos="https://cloud.r-project.org/")'
 ```
 
 Note: If you are on a system with both Ruby 1.9 and Ruby 2.0 you may need to replace gem with gem2.0.
11 changes: 5 additions & 6 deletions docs/building-spark.md
@@ -9,9 +9,9 @@ license: |
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at
-
+
      http://www.apache.org/licenses/LICENSE-2.0
-
+
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -70,9 +70,9 @@ This will build Spark distribution along with Python pip and R packages. For mor
 
 ## Specifying the Hadoop Version and Enabling YARN
 
-You can specify the exact version of Hadoop to compile against through the `hadoop.version` property.
+You can specify the exact version of Hadoop to compile against through the `hadoop.version` property.
 
-You can enable the `yarn` profile and optionally set the `yarn.version` property if it is different
+You can enable the `yarn` profile and optionally set the `yarn.version` property if it is different
 from `hadoop.version`.
 
 Example:
@@ -238,8 +238,7 @@ The run-tests script also can be limited to a specific Python version or a speci
 
 To run the SparkR tests you will need to install the [knitr](https://cran.r-project.org/package=knitr), [rmarkdown](https://cran.r-project.org/package=rmarkdown), [testthat](https://cran.r-project.org/package=testthat), [e1071](https://cran.r-project.org/package=e1071) and [survival](https://cran.r-project.org/package=survival) packages first:
 
-    Rscript -e "install.packages(c('knitr', 'rmarkdown', 'devtools', 'e1071', 'survival'), repos='https://cloud.r-project.org/')"
+    Rscript -e "install.packages(c('knitr', 'rmarkdown', 'devtools', 'testthat', 'e1071', 'survival'), repos='https://cloud.r-project.org/')"
 
 You can run just the SparkR tests using the command:
 