Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 10 additions & 1 deletion docs/configuration.md
Original file line number Diff line number Diff line change
Expand Up @@ -1752,6 +1752,14 @@ showDF(properties, numRows = 200, truncate = FALSE)
Executable for executing R scripts in client modes for driver. Ignored in cluster modes.
</td>
</tr>
<tr>
<td><code>spark.r.shell.command</code></td>
<td>R</td>
<td>
Executable for executing the SparkR shell in client modes for the driver. Ignored in cluster modes. It is the same as the environment variable <code>SPARKR_DRIVER_R</code>, but takes precedence over it.
<code>spark.r.shell.command</code> is used for the SparkR shell, while <code>spark.r.driver.command</code> is used for running R scripts.
</td>
</tr>
</table>

#### Deploy
Expand Down Expand Up @@ -1818,7 +1826,8 @@ The following variables can be set in `spark-env.sh`:
</tr>
<tr>
<td><code>SPARKR_DRIVER_R</code></td>
<td>R binary executable to use for SparkR shell (default is <code>R</code>).</td>
<td>R binary executable to use for SparkR shell (default is <code>R</code>).
Property <code>spark.r.shell.command</code> takes precedence if it is set.</td>
</tr>
<tr>
<td><code>SPARK_LOCAL_IP</code></td>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,8 @@ public class SparkLauncher {

static final String PYSPARK_PYTHON = "spark.pyspark.python";

  // Config key naming the R executable used to launch the SparkR shell in client mode.
  // When set, it takes precedence over the SPARKR_DRIVER_R environment variable.
  static final String SPARKR_R_SHELL = "spark.r.shell.command";

/** Logger name to use when launching a child process. */
public static final String CHILD_PROCESS_LOGGER_NAME = "spark.launcher.childProcLoggerName";

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -336,7 +336,8 @@ private List<String> buildSparkRCommand(Map<String, String> env) throws IOExcept
join(File.separator, sparkHome, "R", "lib", "SparkR", "profile", "shell.R"));

List<String> args = new ArrayList<>();
args.add(firstNonEmpty(System.getenv("SPARKR_DRIVER_R"), "R"));
args.add(firstNonEmpty(conf.get(SparkLauncher.SPARKR_R_SHELL),
System.getenv("SPARKR_DRIVER_R"), "R"));
return args;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,24 @@ public void testPySparkFallback() throws Exception {
assertEquals("arg1", cmd.get(cmd.size() - 1));
}

@Test
public void testSparkRShell() throws Exception {
  // Launch the SparkR shell with spark.r.shell.command set and verify that
  // the configured executable ends up as the command being run, while the
  // remaining submit arguments are forwarded via SPARKR_SUBMIT_ARGS.
  List<String> submitArgs = Arrays.asList(
    SparkSubmitCommandBuilder.SPARKR_SHELL,
    "--master=foo",
    "--deploy-mode=bar",
    "--conf", "spark.r.shell.command=/usr/bin/R");

  Map<String, String> environment = new HashMap<>();
  List<String> command = buildCommand(submitArgs, environment);

  // The last element of the built command is the R executable itself.
  assertEquals("/usr/bin/R", command.get(command.size() - 1));

  String expectedSubmitArgs = String.format(
    "\"%s\" \"foo\" \"%s\" \"bar\" \"--conf\" \"spark.r.shell.command=/usr/bin/R\" \"%s\"",
    parser.MASTER, parser.DEPLOY_MODE, SparkSubmitCommandBuilder.SPARKR_SHELL_RESOURCE);
  assertEquals(expectedSubmitArgs, environment.get("SPARKR_SUBMIT_ARGS"));
}

@Test
public void testExamplesRunner() throws Exception {
List<String> sparkSubmitArgs = Arrays.asList(
Expand Down