@@ -253,6 +253,47 @@ class HiveSparkSubmitSuite
253253 runSparkSubmit(args)
254254 }
255255
256+ test(" SPARK-16901: set javax.jdo.option.ConnectionURL" ) {
257+ // In this test, we set javax.jdo.option.ConnectionURL and set metastore version to
258+ // 0.13. This test will make sure that javax.jdo.option.ConnectionURL will not be
259+ // overridden by hive's default settings when we create a HiveConf object inside
260+ // HiveClientImpl. Please see SPARK-16901 for more details.
261+
262+ val metastoreLocation = Utils .createTempDir()
263+ metastoreLocation.delete()
264+ val metastoreURL =
265+ s " jdbc:derby:memory:;databaseName= ${metastoreLocation.getAbsolutePath};create=true "
266+ val hiveSiteXmlContent =
267+ s """
268+ |<configuration>
269+ | <property>
270+ | <name>javax.jdo.option.ConnectionURL</name>
271+ | <value> $metastoreURL</value>
272+ | </property>
273+ |</configuration>
274+ """ .stripMargin
275+
276+ // Write a hive-site.xml containing a setting of hive.metastore.warehouse.dir.
277+ val hiveSiteDir = Utils .createTempDir()
278+ val file = new File (hiveSiteDir.getCanonicalPath, " hive-site.xml" )
279+ val bw = new BufferedWriter (new FileWriter (file))
280+ bw.write(hiveSiteXmlContent)
281+ bw.close()
282+
283+ val unusedJar = TestUtils .createJarWithClasses(Seq .empty)
284+ val args = Seq (
285+ " --class" , SetMetastoreURLTest .getClass.getName.stripSuffix(" $" ),
286+ " --name" , " SetMetastoreURLTest" ,
287+ " --master" , " local[1]" ,
288+ " --conf" , " spark.ui.enabled=false" ,
289+ " --conf" , " spark.master.rest.enabled=false" ,
290+ " --conf" , s " spark.sql.test.expectedMetastoreURL= $metastoreURL" ,
291+ " --conf" , s " spark.driver.extraClassPath= ${hiveSiteDir.getCanonicalPath}" ,
292+ " --driver-java-options" , " -Dderby.system.durability=test" ,
293+ unusedJar.toString)
294+ runSparkSubmit(args)
295+ }
296+
256297 // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
257298 // This is copied from org.apache.spark.deploy.SparkSubmitSuite
258299 private def runSparkSubmit (args : Seq [String ]): Unit = {
@@ -313,6 +354,45 @@ class HiveSparkSubmitSuite
313354 }
314355}
315356
/**
 * Child program for the SPARK-16901 regression test. Builds a Hive-enabled
 * SparkSession with a non-builtin metastore (version 0.13.1, jars from maven)
 * and verifies that the javax.jdo.option.ConnectionURL picked up from
 * hive-site.xml is not clobbered by Hive's defaults when HiveClientImpl
 * creates its HiveConf. Exits by throwing if the URL does not match the
 * value passed in via spark.sql.test.expectedMetastoreURL.
 */
object SetMetastoreURLTest extends Logging {
  def main(args: Array[String]): Unit = {
    Utils.configTestLog4j("INFO")

    val sparkConf = new SparkConf(loadDefaults = true)
    val builder = SparkSession.builder()
      .config(sparkConf)
      .config("spark.ui.enabled", "false")
      .config("spark.sql.hive.metastore.version", "0.13.1")
      // The issue described in SPARK-16901 only appears when
      // spark.sql.hive.metastore.jars is not set to builtin.
      .config("spark.sql.hive.metastore.jars", "maven")
      .enableHiveSupport()

    val spark = builder.getOrCreate()
    // The parent test passes the URL it wrote into hive-site.xml through this conf.
    val expectedMetastoreURL =
      spark.conf.get("spark.sql.test.expectedMetastoreURL")
    logInfo(s"spark.sql.test.expectedMetastoreURL is $expectedMetastoreURL")

    if (expectedMetastoreURL == null) {
      throw new Exception(
        s"spark.sql.test.expectedMetastoreURL should be set.")
    }

    // HiveSharedState is used when Hive support is enabled.
    val actualMetastoreURL =
      spark.sharedState.asInstanceOf[HiveSharedState]
        .metadataHive
        .getConf("javax.jdo.option.ConnectionURL", "this_is_a_wrong_URL")
    logInfo(s"javax.jdo.option.ConnectionURL is $actualMetastoreURL")

    if (actualMetastoreURL != expectedMetastoreURL) {
      throw new Exception(
        s"Expected value of javax.jdo.option.ConnectionURL is $expectedMetastoreURL. But, " +
          s"the actual value is $actualMetastoreURL")
    }
  }
}
395+
316396object SetWarehouseLocationTest extends Logging {
317397 def main (args : Array [String ]): Unit = {
318398 Utils .configTestLog4j(" INFO" )
0 commit comments