
Commit 5539ecf

MaxGekk authored and cloud-fan committed
[SPARK-31725][CORE][SQL][TESTS] Set America/Los_Angeles time zone and Locale.US in tests by default
### What changes were proposed in this pull request?

Set default time zone and locale in the default constructor of `SparkFunSuite`:
- Default time zone to `America/Los_Angeles`
- Default locale to `Locale.US`

### Why are the changes needed?

1. To deduplicate code by moving common time zone and locale settings to one place, `SparkFunSuite`.
2. To have the same default time zone and locale in all tests. This should prevent errors like #28538.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

By running all affected test suites.

Closes #28548 from MaxGekk/timezone-settings-SparkFunSuite.

Authored-by: Max Gekk <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
1 parent 53bf825 commit 5539ecf
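
To illustrate why fixing the default matters (this sketch is not part of the commit), rendering a `java.sql.Date` built from a fixed instant depends on the JVM default time zone, so the same test data can produce different calendar dates on differently configured machines. The `millis` value below is the one used in `SortOrderExpressionsSuite`; the demo object name is illustrative only.

```scala
import java.util.TimeZone

// Minimal sketch: the same epoch millis renders as different calendar dates
// depending on the JVM default time zone, which is why tests pin it.
object DefaultTimeZoneDemo {
  def main(args: Array[String]): Unit = {
    val millis = 1524954911000L // value used in SortOrderExpressionsSuite

    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
    println(new java.sql.Date(millis)) // 2018-04-28 (Los Angeles)

    TimeZone.setDefault(TimeZone.getTimeZone("Asia/Tokyo"))
    println(new java.sql.Date(millis)) // 2018-04-29 (Tokyo)
  }
}
```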

8 files changed: +10 −68 lines changed

core/src/test/scala/org/apache/spark/SparkFunSuite.scala

Lines changed: 6 additions & 0 deletions
@@ -19,6 +19,7 @@ package org.apache.spark
 
 // scalastyle:off
 import java.io.File
+import java.util.{Locale, TimeZone}
 
 import org.apache.log4j.spi.LoggingEvent
 

@@ -63,6 +64,11 @@ abstract class SparkFunSuite
   with Logging {
 // scalastyle:on
 
+  // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
+  TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
+  // Add Locale setting
+  Locale.setDefault(Locale.US)
+
   protected val enableAutoThreadAudit = true
 
   protected override def beforeAll(): Unit = {
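
With this change, any suite extending `SparkFunSuite` picks up the fixed defaults from the base-class constructor, with no per-suite save/restore bookkeeping. A hypothetical sketch (the suite name and test are illustrative, not from this commit):

```scala
import java.util.{Locale, TimeZone}

import org.apache.spark.SparkFunSuite

// Hypothetical suite: the defaults asserted below are set by SparkFunSuite's
// constructor, so no per-suite beforeAll/afterAll handling is needed.
class TimestampDefaultsSuite extends SparkFunSuite {
  test("JVM defaults are fixed for timezone/locale sensitive code") {
    assert(TimeZone.getDefault.getID === "America/Los_Angeles")
    assert(Locale.getDefault === Locale.US)
  }
}
```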

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/SortOrderExpressionsSuite.scala

Lines changed: 1 addition & 10 deletions
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.catalyst.expressions
 
 import java.sql.Timestamp
-import java.util.TimeZone
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.types._

@@ -35,15 +34,7 @@ class SortOrderExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
     val l1 = Literal.create(20132983L, LongType)
     val l2 = Literal.create(-20132983L, LongType)
     val millis = 1524954911000L
-    // Explicitly choose a time zone, since Date objects can create different values depending on
-    // local time zone of the machine on which the test is running
-    val oldDefaultTZ = TimeZone.getDefault
-    val d1 = try {
-      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
-      Literal.create(new java.sql.Date(millis), DateType)
-    } finally {
-      TimeZone.setDefault(oldDefaultTZ)
-    }
+    val d1 = Literal.create(new java.sql.Date(millis), DateType)
     val t1 = Literal.create(new Timestamp(millis), TimestampType)
     val f1 = Literal.create(0.7788229f, FloatType)
     val f2 = Literal.create(-0.7788229f, FloatType)

sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala

Lines changed: 1 addition & 6 deletions
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql
 
-import java.util.{Locale, TimeZone}
+import java.util.TimeZone
 
 import scala.collection.JavaConverters._
 

@@ -35,11 +35,6 @@ abstract class QueryTest extends PlanTest {
 
   protected def spark: SparkSession
 
-  // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
-  TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
-  // Add Locale setting
-  Locale.setDefault(Locale.US)
-
   /**
    * Runs the plan and makes sure the answer contains all of the keywords.
   */

sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala

Lines changed: 1 addition & 10 deletions
@@ -18,7 +18,7 @@
 package org.apache.spark.sql
 
 import java.io.File
-import java.util.{Locale, TimeZone}
+import java.util.Locale
 import java.util.regex.Pattern
 
 import scala.collection.mutable.{ArrayBuffer, HashMap}

@@ -672,25 +672,16 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession {
     session.sql("DROP TABLE IF EXISTS tenk1")
   }
 
-  private val originalTimeZone = TimeZone.getDefault
-  private val originalLocale = Locale.getDefault
-
   override def beforeAll(): Unit = {
     super.beforeAll()
     createTestTables(spark)
-    // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
-    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
-    // Add Locale setting
-    Locale.setDefault(Locale.US)
     RuleExecutor.resetMetrics()
     CodeGenerator.resetCompileTime()
     WholeStageCodegenExec.resetCodeGenTime()
   }
 
   override def afterAll(): Unit = {
     try {
-      TimeZone.setDefault(originalTimeZone)
-      Locale.setDefault(originalLocale)
       removeTestTables(spark)
 
       // For debugging dump some statistics about how much time was spent in various optimizer rules

sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala

Lines changed: 0 additions & 9 deletions
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.hive.execution
 
 import java.io.File
-import java.util.{Locale, TimeZone}
 
 import org.scalatest.BeforeAndAfter
 

@@ -36,8 +35,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
   private lazy val hiveQueryDir = TestHive.getHiveFile(
     "ql/src/test/queries/clientpositive".split("/").mkString(File.separator))
 
-  private val originalTimeZone = TimeZone.getDefault
-  private val originalLocale = Locale.getDefault
   private val originalColumnBatchSize = TestHive.conf.columnBatchSize
   private val originalInMemoryPartitionPruning = TestHive.conf.inMemoryPartitionPruning
   private val originalCrossJoinEnabled = TestHive.conf.crossJoinEnabled

@@ -51,10 +48,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
   override def beforeAll(): Unit = {
     super.beforeAll()
     TestHive.setCacheTables(true)
-    // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
-    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
-    // Add Locale setting
-    Locale.setDefault(Locale.US)
     // Set a relatively small column batch size for testing purposes
     TestHive.setConf(SQLConf.COLUMN_BATCH_SIZE, 5)
     // Enable in-memory partition pruning for testing purposes

@@ -73,8 +66,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
   override def afterAll(): Unit = {
     try {
       TestHive.setCacheTables(false)
-      TimeZone.setDefault(originalTimeZone)
-      Locale.setDefault(originalLocale)
       TestHive.setConf(SQLConf.COLUMN_BATCH_SIZE, originalColumnBatchSize)
       TestHive.setConf(SQLConf.IN_MEMORY_PARTITION_PRUNING, originalInMemoryPartitionPruning)
       TestHive.setConf(SQLConf.CROSS_JOINS_ENABLED, originalCrossJoinEnabled)

sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala

Lines changed: 0 additions & 17 deletions
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.hive.execution
 
 import java.io.File
-import java.util.{Locale, TimeZone}
 
 import org.scalatest.BeforeAndAfter
 

@@ -33,17 +32,11 @@ import org.apache.spark.util.Utils
  * files, every `createQueryTest` calls should explicitly set `reset` to `false`.
  */
 class HiveWindowFunctionQuerySuite extends HiveComparisonTest with BeforeAndAfter {
-  private val originalTimeZone = TimeZone.getDefault
-  private val originalLocale = Locale.getDefault
   private val testTempDir = Utils.createTempDir()
 
   override def beforeAll(): Unit = {
     super.beforeAll()
     TestHive.setCacheTables(true)
-    // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
-    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
-    // Add Locale setting
-    Locale.setDefault(Locale.US)
 
     // Create the table used in windowing.q
     sql("DROP TABLE IF EXISTS part")

@@ -103,8 +96,6 @@ class HiveWindowFunctionQuerySuite extends HiveComparisonTest with BeforeAndAfter {
   override def afterAll(): Unit = {
     try {
       TestHive.setCacheTables(false)
-      TimeZone.setDefault(originalTimeZone)
-      Locale.setDefault(originalLocale)
       TestHive.reset()
     } finally {
       super.afterAll()

@@ -747,17 +738,11 @@ class HiveWindowFunctionQuerySuite extends HiveComparisonTest with BeforeAndAfter {
 
 class HiveWindowFunctionQueryFileSuite
   extends HiveCompatibilitySuite with BeforeAndAfter {
-  private val originalTimeZone = TimeZone.getDefault
-  private val originalLocale = Locale.getDefault
   private val testTempDir = Utils.createTempDir()
 
   override def beforeAll(): Unit = {
     super.beforeAll()
     TestHive.setCacheTables(true)
-    // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
-    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
-    // Add Locale setting
-    Locale.setDefault(Locale.US)
 
     // The following settings are used for generating golden files with Hive.
     // We have to use kryo to correctly let Hive serialize plans with window functions.

@@ -772,8 +757,6 @@ class HiveWindowFunctionQueryFileSuite
   override def afterAll(): Unit = {
     try {
       TestHive.setCacheTables(false)
-      TimeZone.setDefault(originalTimeZone)
-      Locale.setDefault(originalLocale)
       TestHive.reset()
     } finally {
       super.afterAll()

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala

Lines changed: 0 additions & 6 deletions
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.hive
 
 import java.util
-import java.util.{Locale, TimeZone}
 
 import org.apache.hadoop.hive.ql.udf.UDAFPercentile
 import org.apache.hadoop.hive.serde2.io.DoubleWritable

@@ -74,11 +73,6 @@ class HiveInspectorSuite extends SparkFunSuite with HiveInspectors {
       .get())
   }
 
-  // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
-  TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
-  // Add Locale setting
-  Locale.setDefault(Locale.US)
-
   val data =
     Literal(true) ::
     Literal(null) ::

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala

Lines changed: 1 addition & 10 deletions
@@ -20,7 +20,7 @@ package org.apache.spark.sql.hive.execution
 import java.io.File
 import java.net.URI
 import java.sql.Timestamp
-import java.util.{Locale, TimeZone}
+import java.util.Locale
 
 import scala.util.Try
 

@@ -47,9 +47,6 @@ case class TestData(a: Int, b: String)
  * included in the hive distribution.
  */
 class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAndAfter {
-  private val originalTimeZone = TimeZone.getDefault
-  private val originalLocale = Locale.getDefault
-
   import org.apache.spark.sql.hive.test.TestHive.implicits._
 
   private val originalCrossJoinEnabled = TestHive.conf.crossJoinEnabled

@@ -59,19 +56,13 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAndAfter {
   override def beforeAll(): Unit = {
     super.beforeAll()
     TestHive.setCacheTables(true)
-    // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
-    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
-    // Add Locale setting
-    Locale.setDefault(Locale.US)
     // Ensures that cross joins are enabled so that we can test them
     TestHive.setConf(SQLConf.CROSS_JOINS_ENABLED, true)
   }
 
   override def afterAll(): Unit = {
     try {
       TestHive.setCacheTables(false)
-      TimeZone.setDefault(originalTimeZone)
-      Locale.setDefault(originalLocale)
       sql("DROP TEMPORARY FUNCTION IF EXISTS udtf_count2")
       TestHive.setConf(SQLConf.CROSS_JOINS_ENABLED, originalCrossJoinEnabled)
     } finally {
