From 9a3aea61caf8030659de1e81b59030b8ae3bfa1b Mon Sep 17 00:00:00 2001
From: Jeff Zhang
Date: Mon, 13 Feb 2017 13:52:22 +0800
Subject: [PATCH 1/3] [SPARK-19572][SPARKR] Allow to disable hive in sparkR shell

---
 .../main/scala/org/apache/spark/sql/api/r/SQLUtils.scala | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
index e56c33e4b512..b8c8b08d9176 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
@@ -47,10 +47,12 @@ private[sql] object SQLUtils extends Logging {
       jsc: JavaSparkContext,
       sparkConfigMap: JMap[Object, Object],
       enableHiveSupport: Boolean): SparkSession = {
-    val spark = if (SparkSession.hiveClassesArePresent && enableHiveSupport) {
+    val spark = if (SparkSession.hiveClassesArePresent && enableHiveSupport
+        && jsc.sc.conf.get(CATALOG_IMPLEMENTATION.key, "hive") == "hive") {
       SparkSession.builder().sparkContext(withHiveExternalCatalog(jsc.sc)).getOrCreate()
     } else {
-      if (enableHiveSupport) {
+      if (enableHiveSupport
+          && jsc.sc.conf.get(CATALOG_IMPLEMENTATION.key, "hive") == "hive") {
         logWarning("SparkR: enableHiveSupport is requested for SparkSession but " +
           "Spark is not built with Hive; falling back to without Hive support.")
       }

From 68b5823cf070dc09512723ef3a671589d0f42275 Mon Sep 17 00:00:00 2001
From: Jeff Zhang
Date: Tue, 14 Feb 2017 10:02:10 +0800
Subject: [PATCH 2/3] address comments

---
 .../main/scala/org/apache/spark/sql/api/r/SQLUtils.scala | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
index b8c8b08d9176..70f0915f049d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
@@ -48,13 +48,14 @@ private[sql] object SQLUtils extends Logging {
       sparkConfigMap: JMap[Object, Object],
       enableHiveSupport: Boolean): SparkSession = {
     val spark = if (SparkSession.hiveClassesArePresent && enableHiveSupport
-        && jsc.sc.conf.get(CATALOG_IMPLEMENTATION.key, "hive") == "hive") {
+        && jsc.sc.conf.get(CATALOG_IMPLEMENTATION.key, "hive").toLowerCase == "hive") {
       SparkSession.builder().sparkContext(withHiveExternalCatalog(jsc.sc)).getOrCreate()
     } else {
       if (enableHiveSupport
-          && jsc.sc.conf.get(CATALOG_IMPLEMENTATION.key, "hive") == "hive") {
+          && jsc.sc.conf.get(CATALOG_IMPLEMENTATION.key, "hive").toLowerCase == "hive") {
         logWarning("SparkR: enableHiveSupport is requested for SparkSession but " +
-          "Spark is not built with Hive; falling back to without Hive support.")
+          s"Spark is not built with Hive or ${CATALOG_IMPLEMENTATION.key} is not set to 'hive', " +
+          "falling back to without Hive support.")
       }
       SparkSession.builder().sparkContext(jsc.sc).getOrCreate()
     }

From 89a8752deac4bfeb7a8a224b06127bec8654a61d Mon Sep 17 00:00:00 2001
From: Jeff Zhang
Date: Wed, 1 Mar 2017 10:31:56 +0800
Subject: [PATCH 3/3] update the if condition

---
 .../src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
index 70f0915f049d..a4c5bf756cd5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
@@ -51,8 +51,7 @@ private[sql] object SQLUtils extends Logging {
         && jsc.sc.conf.get(CATALOG_IMPLEMENTATION.key, "hive").toLowerCase == "hive") {
       SparkSession.builder().sparkContext(withHiveExternalCatalog(jsc.sc)).getOrCreate()
     } else {
-      if (enableHiveSupport
-          && jsc.sc.conf.get(CATALOG_IMPLEMENTATION.key, "hive").toLowerCase == "hive") {
+      if (enableHiveSupport) {
         logWarning("SparkR: enableHiveSupport is requested for SparkSession but " +
           s"Spark is not built with Hive or ${CATALOG_IMPLEMENTATION.key} is not set to 'hive', " +
           "falling back to without Hive support.")
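Net effect of the three patches, sketched below for reference: the Hive external catalog is only wired in when Hive classes are on the classpath, Hive support is requested, and spark.sql.catalogImplementation (CATALOG_IMPLEMENTATION.key) is left at, or explicitly set to, "hive"; otherwise the session uses the default catalog and, if Hive support was requested, a warning is logged. This is a reconstruction of the method body in SQLUtils.scala after PATCH 3/3, not part of the patches themselves; the lines outside the hunks and the fact that the fragment sits inside SQLUtils' existing session-creation method (with its existing imports such as CATALOG_IMPLEMENTATION and SparkSession) are assumptions taken from the hunk context.

    // Sketch of the session-creation logic once all three patches are applied.
    // Only the lines shown in the hunks above are confirmed by the patches.
    val spark = if (SparkSession.hiveClassesArePresent && enableHiveSupport
        && jsc.sc.conf.get(CATALOG_IMPLEMENTATION.key, "hive").toLowerCase == "hive") {
      // Hive classes are present, Hive support is requested, and the catalog
      // implementation is (or defaults to) "hive": use the Hive external catalog.
      SparkSession.builder().sparkContext(withHiveExternalCatalog(jsc.sc)).getOrCreate()
    } else {
      if (enableHiveSupport) {
        // Hive support was requested but is unavailable or explicitly disabled
        // via spark.sql.catalogImplementation; warn and continue without it.
        logWarning("SparkR: enableHiveSupport is requested for SparkSession but " +
          s"Spark is not built with Hive or ${CATALOG_IMPLEMENTATION.key} is not set to 'hive', " +
          "falling back to without Hive support.")
      }
      SparkSession.builder().sparkContext(jsc.sc).getOrCreate()
    }

With the series applied, a SparkR user can run without Hive either by calling sparkR.session(enableHiveSupport = FALSE) or by launching with --conf spark.sql.catalogImplementation=in-memory; the added config check is what makes the second route effective, which is the scenario SPARK-19572 targets.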