Skip to content

Commit 2cc1ee4

Browse files
committed
[SPARK-45344][CORE][SQL] Remove all Scala version string check
### What changes were proposed in this pull request?

This PR removes all the no longer needed Scala version string checks.

### Why are the changes needed?

These version checks are no longer needed.

### Does this PR introduce _any_ user-facing change?

No

### How was this patch tested?

Pass GitHub Actions

### Was this patch authored or co-authored using generative AI tooling?

No

Closes #43133 from LuciferYang/SPARK-45344.

Authored-by: yangjie01 <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
1 parent 6d2ffaa commit 2cc1ee4

File tree

4 files changed

+3
-38
lines changed

4 files changed

+3
-38
lines changed

core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@ import javax.annotation.Nullable
2626
import scala.collection.mutable.ArrayBuffer
2727
import scala.jdk.CollectionConverters._
2828
import scala.reflect.ClassTag
29-
import scala.util.Properties
3029
import scala.util.control.NonFatal
3130

3231
import com.esotericsoftware.kryo.{Kryo, KryoException, Serializer => KryoClassSerializer}
@@ -229,9 +228,7 @@ class KryoSerializer(conf: SparkConf)
229228

230229
kryo.register(None.getClass)
231230
kryo.register(Nil.getClass)
232-
if (Properties.versionNumberString.startsWith("2.13")) {
233-
kryo.register(Utils.classForName("scala.collection.immutable.ArraySeq$ofRef"))
234-
}
231+
kryo.register(Utils.classForName("scala.collection.immutable.ArraySeq$ofRef"))
235232
kryo.register(Utils.classForName("scala.collection.immutable.$colon$colon"))
236233
kryo.register(Utils.classForName("scala.collection.immutable.Map$EmptyMap$"))
237234
kryo.register(Utils.classForName("scala.math.Ordering$Reverse"))

core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala

Lines changed: 1 addition & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,6 @@ package org.apache.spark.deploy.rest
1919

2020
import java.lang.Boolean
2121

22-
import scala.util.Properties.versionNumberString
23-
2422
import org.json4s.jackson.JsonMethods._
2523

2624
import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -235,34 +233,7 @@ class SubmitRestProtocolSuite extends SparkFunSuite {
235233
|}
236234
""".stripMargin
237235

238-
private lazy val submitDriverRequestJson = if (versionNumberString.startsWith("2.12")) {
239-
s"""
240-
|{
241-
| "action" : "CreateSubmissionRequest",
242-
| "appArgs" : [ "two slices", "a hint of cinnamon" ],
243-
| "appResource" : "honey-walnut-cherry.jar",
244-
| "clientSparkVersion" : "1.2.3",
245-
| "environmentVariables" : {
246-
| "PATH" : "/dev/null"
247-
| },
248-
| "mainClass" : "org.apache.spark.examples.SparkPie",
249-
| "sparkProperties" : {
250-
| "spark.archives" : "fireballs.zip",
251-
| "spark.driver.extraLibraryPath" : "pickle.jar",
252-
| "spark.jars" : "mayonnaise.jar,ketchup.jar",
253-
| "spark.driver.supervise" : "false",
254-
| "spark.app.name" : "SparkPie",
255-
| "spark.cores.max" : "10000",
256-
| "spark.driver.memory" : "${Utils.DEFAULT_DRIVER_MEM_MB}m",
257-
| "spark.files" : "fireball.png",
258-
| "spark.driver.cores" : "180",
259-
| "spark.driver.extraJavaOptions" : " -Dslices=5 -Dcolor=mostly_red",
260-
| "spark.executor.memory" : "256m",
261-
| "spark.driver.extraClassPath" : "food-coloring.jar"
262-
| }
263-
|}
264-
""".stripMargin
265-
} else {
236+
private lazy val submitDriverRequestJson =
266237
s"""
267238
|{
268239
| "action" : "CreateSubmissionRequest",
@@ -289,7 +260,6 @@ class SubmitRestProtocolSuite extends SparkFunSuite {
289260
| }
290261
|}
291262
""".stripMargin
292-
}
293263

294264
private val submitDriverResponseJson =
295265
"""

core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -553,7 +553,6 @@ class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
553553
}
554554

555555
test("SPARK-43898: Register scala.collection.immutable.ArraySeq$ofRef for Scala 2.13") {
556-
assume(scala.util.Properties.versionNumberString.startsWith("2.13"))
557556
val conf = new SparkConf(false)
558557
conf.set(KRYO_REGISTRATION_REQUIRED, true)
559558
val ser = new KryoSerializer(conf).newInstance()

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -162,9 +162,8 @@ class HiveSparkSubmitSuite
162162
// Before the fix in SPARK-8470, this results in a MissingRequirementError because
163163
// the HiveContext code mistakenly overrides the class loader that contains user classes.
164164
// For more detail, see sql/hive/src/test/resources/regression-test-SPARK-8489/*scala.
165-
// TODO: revisit for Scala 2.13 support
166165
val version = Properties.versionNumberString match {
167-
case v if v.startsWith("2.12") || v.startsWith("2.13") => v.substring(0, 4)
166+
case v if v.startsWith("2.13") => v.substring(0, 4)
168167
case x => throw new Exception(s"Unsupported Scala Version: $x")
169168
}
170169
val jarDir = getTestResourcePath("regression-test-SPARK-8489")

0 commit comments

Comments (0)