@@ -58,14 +58,23 @@ setMethod("initialize", "SparkDataFrame", function(.Object, sdf, isCached) {
#' Set options/mode and then return the write object
#' @noRd
setWriteOptions <- function(write, path = NULL, mode = "error", ...) {
  # Collect caller-supplied writer options into a string environment; an
  # explicit `path`, when given, is folded in under the "path" key.
  options <- varargsToStrEnv(...)
  if (!is.null(path)) {
    options[["path"]] <- path
  }
  # Validate and apply the save mode first, then hand the accumulated
  # options to the underlying JVM writer object.
  write <- setWriteMode(write, mode)
  write <- callJMethod(write, "options", options)
  write
}
69+
#' Set mode and then return the write object
#' @noRd
setWriteMode <- function(write, mode) {
  # Fail fast on anything that is not a character save mode.
  if (!is.character(mode)) {
    stop("mode should be character or omitted. It is 'error' by default.")
  }
  # Delegate to the JVM writer's mode(String) method; handledCallJMethod
  # presumably reports Java-side failures (e.g. an unknown mode name) as
  # R errors — confirm against the utils helper.
  handledCallJMethod(write, "mode", mode)
}
7079
7180# ' @export
@@ -556,9 +565,8 @@ setMethod("registerTempTable",
556565setMethod ("insertInto ",
557566 signature(x = " SparkDataFrame" , tableName = " character" ),
558567 function (x , tableName , overwrite = FALSE ) {
559- jmode <- convertToJSaveMode(ifelse(overwrite , " overwrite" , " append" ))
560568 write <- callJMethod(x @ sdf , " write" )
561- write <- callJMethod (write , " mode " , jmode )
569+ write <- setWriteMode (write , ifelse( overwrite , " overwrite " , " append " ) )
562570 invisible (callJMethod(write , " insertInto" , tableName ))
563571 })
564572
@@ -810,7 +818,8 @@ setMethod("toJSON",
#'
#' @param x A SparkDataFrame
#' @param path The directory where the file is saved
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
#' @param ... additional argument(s) passed to the method.
#'
#' @family SparkDataFrame functions
@@ -841,7 +850,8 @@ setMethod("write.json",
#'
#' @param x A SparkDataFrame
#' @param path The directory where the file is saved
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
#' @param ... additional argument(s) passed to the method.
#'
#' @family SparkDataFrame functions
@@ -872,7 +882,8 @@ setMethod("write.orc",
#'
#' @param x A SparkDataFrame
#' @param path The directory where the file is saved
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
#' @param ... additional argument(s) passed to the method.
#'
#' @family SparkDataFrame functions
@@ -917,7 +928,8 @@ setMethod("saveAsParquetFile",
#'
#' @param x A SparkDataFrame
#' @param path The directory where the file is saved
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
#' @param ... additional argument(s) passed to the method.
#'
#' @family SparkDataFrame functions
@@ -2871,18 +2883,19 @@ setMethod("except",
#' Additionally, mode is used to specify the behavior of the save operation when data already
#' exists in the data source. There are four modes:
#' \itemize{
#'  \item 'append': Contents of this SparkDataFrame are expected to be appended to existing data.
#'  \item 'overwrite': Existing data is expected to be overwritten by the contents of this
#'         SparkDataFrame.
#'  \item 'error' or 'errorifexists': An exception is expected to be thrown.
#'  \item 'ignore': The save operation is expected to not save the contents of the SparkDataFrame
#'         and to not change the existing data.
#' }
#'
#' @param df a SparkDataFrame.
#' @param path a name for the table.
#' @param source a name for external data source.
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
#' @param ... additional argument(s) passed to the method.
#'
#' @family SparkDataFrame functions
@@ -2940,17 +2953,18 @@ setMethod("saveDF",
#'
#' Additionally, mode is used to specify the behavior of the save operation when
#' data already exists in the data source. There are four modes: \cr
#' 'append': Contents of this SparkDataFrame are expected to be appended to existing data. \cr
#' 'overwrite': Existing data is expected to be overwritten by the contents of this
#' SparkDataFrame. \cr
#' 'error' or 'errorifexists': An exception is expected to be thrown. \cr
#' 'ignore': The save operation is expected to not save the contents of the SparkDataFrame
#' and to not change the existing data. \cr
#'
#' @param df a SparkDataFrame.
#' @param tableName a name for the table.
#' @param source a name for external data source.
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
#' @param ... additional option(s) passed to the method.
#'
#' @family SparkDataFrame functions
@@ -2972,12 +2986,11 @@ setMethod("saveAsTable",
29722986 if (is.null(source )) {
29732987 source <- getDefaultSqlSource()
29742988 }
2975- jmode <- convertToJSaveMode(mode )
29762989 options <- varargsToStrEnv(... )
29772990
29782991 write <- callJMethod(df @ sdf , " write" )
29792992 write <- callJMethod(write , " format" , source )
2980- write <- callJMethod (write , " mode" , jmode )
2993+ write <- setWriteMode (write , mode )
29812994 write <- callJMethod(write , " options" , options )
29822995 invisible (callJMethod(write , " saveAsTable" , tableName ))
29832996 })
@@ -3236,7 +3249,7 @@ setMethod("as.data.frame",
#'
#' @family SparkDataFrame functions
#' @rdname attach
#' @aliases attach attach,SparkDataFrame-method
#' @param what (SparkDataFrame) The SparkDataFrame to attach
#' @param pos (integer) Specify position in search() where to attach.
#' @param name (character) Name to use for the attached SparkDataFrame. Names
@@ -3252,9 +3265,12 @@ setMethod("as.data.frame",
32523265# ' @note attach since 1.6.0
32533266setMethod ("attach ",
32543267 signature(what = " SparkDataFrame" ),
3255- function (what , pos = 2 , name = deparse(substitute(what )), warn.conflicts = TRUE ) {
3256- newEnv <- assignNewEnv(what )
3257- attach(newEnv , pos = pos , name = name , warn.conflicts = warn.conflicts )
3268+ function (what , pos = 2L , name = deparse(substitute(what ), backtick = FALSE ),
3269+ warn.conflicts = TRUE ) {
3270+ args <- as.list(environment()) # capture all parameters - this must be the first line
3271+ newEnv <- assignNewEnv(args $ what )
3272+ args $ what <- newEnv
3273+ do.call(attach , args )
32583274 })
32593275
32603276# ' Evaluate a R expression in an environment constructed from a SparkDataFrame
@@ -3541,18 +3557,19 @@ setMethod("histogram",
#' Also, mode is used to specify the behavior of the save operation when
#' data already exists in the data source. There are four modes:
#' \itemize{
#'  \item 'append': Contents of this SparkDataFrame are expected to be appended to existing data.
#'  \item 'overwrite': Existing data is expected to be overwritten by the contents of this
#'         SparkDataFrame.
#'  \item 'error' or 'errorifexists': An exception is expected to be thrown.
#'  \item 'ignore': The save operation is expected to not save the contents of the SparkDataFrame
#'         and to not change the existing data.
#' }
#'
#' @param x a SparkDataFrame.
#' @param url JDBC database url of the form \code{jdbc:subprotocol:subname}.
#' @param tableName the name of the table in the external database.
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
#' @param ... additional JDBC database connection properties.
#' @family SparkDataFrame functions
#' @rdname write.jdbc
@@ -3569,10 +3586,9 @@ setMethod("histogram",
35693586setMethod ("write.jdbc ",
35703587 signature(x = " SparkDataFrame" , url = " character" , tableName = " character" ),
35713588 function (x , url , tableName , mode = " error" , ... ) {
3572- jmode <- convertToJSaveMode(mode )
35733589 jprops <- varargsToJProperties(... )
35743590 write <- callJMethod(x @ sdf , " write" )
3575- write <- callJMethod (write , " mode" , jmode )
3591+ write <- setWriteMode (write , mode )
35763592 invisible (handledCallJMethod(write , " jdbc" , url , tableName , jprops ))
35773593 })
35783594
0 commit comments