diff --git a/docs/configuration.md b/docs/configuration.md
index 96e8c6d08a1e..2a366e5d69c2 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -1752,6 +1752,14 @@ showDF(properties, numRows = 200, truncate = FALSE)
Executable for executing R scripts in client modes for driver. Ignored in cluster modes.
+
+ spark.r.shell.command |
+ R |
+
+ Executable for executing the SparkR shell in client modes for the driver. Ignored in cluster modes. It is the same as the environment variable SPARKR_DRIVER_R, but takes precedence over it.
+ spark.r.shell.command is used for the SparkR shell while spark.r.driver.command is used for running R scripts.
+ |
+
#### Deploy
@@ -1818,7 +1826,8 @@ The following variables can be set in `spark-env.sh`:
SPARKR_DRIVER_R |
- R binary executable to use for SparkR shell (default is R). |
+ R binary executable to use for SparkR shell (default is R).
+ Property spark.r.shell.command takes precedence if it is set |
SPARK_LOCAL_IP |
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
index 7b7a7bf57b11..ea56214d2390 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
@@ -68,6 +68,8 @@ public class SparkLauncher {
static final String PYSPARK_PYTHON = "spark.pyspark.python";
+ static final String SPARKR_R_SHELL = "spark.r.shell.command";
+
/** Logger name to use when launching a child process. */
public static final String CHILD_PROCESS_LOGGER_NAME = "spark.launcher.childProcLoggerName";
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index f6da644e4c37..29c6d82cdbf1 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -336,7 +336,8 @@ private List<String> buildSparkRCommand(Map<String, String> env) throws IOExcept
join(File.separator, sparkHome, "R", "lib", "SparkR", "profile", "shell.R"));
List<String> args = new ArrayList<>();
- args.add(firstNonEmpty(System.getenv("SPARKR_DRIVER_R"), "R"));
+ args.add(firstNonEmpty(conf.get(SparkLauncher.SPARKR_R_SHELL),
+ System.getenv("SPARKR_DRIVER_R"), "R"));
return args;
}
diff --git a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
index 16e5a22401ca..ad2e7a70c4ea 100644
--- a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
+++ b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
@@ -172,6 +172,24 @@ public void testPySparkFallback() throws Exception {
assertEquals("arg1", cmd.get(cmd.size() - 1));
}
+ @Test
+ public void testSparkRShell() throws Exception {
+ List<String> sparkSubmitArgs = Arrays.asList(
+ SparkSubmitCommandBuilder.SPARKR_SHELL,
+ "--master=foo",
+ "--deploy-mode=bar",
+ "--conf", "spark.r.shell.command=/usr/bin/R");
+
+ Map<String, String> env = new HashMap<>();
+ List<String> cmd = buildCommand(sparkSubmitArgs, env);
+ assertEquals("/usr/bin/R", cmd.get(cmd.size() - 1));
+ assertEquals(
+ String.format(
+ "\"%s\" \"foo\" \"%s\" \"bar\" \"--conf\" \"spark.r.shell.command=/usr/bin/R\" \"%s\"",
+ parser.MASTER, parser.DEPLOY_MODE, SparkSubmitCommandBuilder.SPARKR_SHELL_RESOURCE),
+ env.get("SPARKR_SUBMIT_ARGS"));
+ }
+
@Test
public void testExamplesRunner() throws Exception {
List sparkSubmitArgs = Arrays.asList(