
Commit 8e875d2

WangTaoTheTonic authored and pwendell committed
[SPARK-3599] Avoid loading properties file frequently
https://issues.apache.org/jira/browse/SPARK-3599

Author: WangTao <[email protected]>
Author: WangTaoTheTonic <[email protected]>

Closes apache#2454 from WangTaoTheTonic/avoidLoadingFrequently and squashes the following commits:

3681182 [WangTao] do not use clone
7dca036 [WangTao] use lazy val instead
2a79f26 [WangTaoTheTonic] Avoid loading properties file frequently
1 parent 293ce85 commit 8e875d2
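
For context on the fix: in Scala a def re-runs its body on every call, while a lazy val runs it once on first access and caches the result. Switching defaultSparkProperties from a def to a lazy val therefore means the properties file is read at most once per SparkSubmitArguments instance. A minimal, standalone sketch of that difference (illustrative names and values, not code from this commit):

import scala.collection.mutable.HashMap

class PropertiesHolder {
  // A def re-evaluates its body on every call, so a file-backed def would
  // re-read the properties file each time it is referenced.
  def propsViaDef: HashMap[String, String] = {
    println("loading properties...")            // stands in for file I/O
    HashMap("spark.master" -> "local")
  }

  // A lazy val evaluates its body once, on first access, then caches it.
  lazy val propsViaLazyVal: HashMap[String, String] = {
    println("loading properties...")
    HashMap("spark.master" -> "local")
  }
}

object LazyValDemo extends App {
  val h = new PropertiesHolder
  h.propsViaDef; h.propsViaDef                  // prints "loading properties..." twice
  h.propsViaLazyVal; h.propsViaLazyVal          // prints it only once
}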

File tree

2 files changed: +10 −9 lines changed


core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 1 addition & 1 deletion
@@ -280,7 +280,7 @@ object SparkSubmit {
     }

     // Read from default spark properties, if any
-    for ((k, v) <- args.getDefaultSparkProperties) {
+    for ((k, v) <- args.defaultSparkProperties) {
       sysProps.getOrElseUpdate(k, v)
     }
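
The call site keeps the same shape because getOrElseUpdate only writes a key that is absent, so defaults never clobber values already placed in sysProps. A standalone illustration of that behavior (made-up keys and values, not from this commit):

import scala.collection.mutable.HashMap

object GetOrElseUpdateDemo extends App {
  val sysProps = HashMap("spark.master" -> "yarn")   // already set explicitly
  val defaults = Map("spark.master" -> "local", "spark.ui.port" -> "4040")

  // Mirrors the loop above: only fills in keys that are not present yet.
  for ((k, v) <- defaults) {
    sysProps.getOrElseUpdate(k, v)
  }

  println(sysProps)   // spark.master stays "yarn"; spark.ui.port comes from defaults
}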

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 9 additions & 8 deletions
@@ -57,12 +57,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
   var pyFiles: String = null
   val sparkProperties: HashMap[String, String] = new HashMap[String, String]()

-  parseOpts(args.toList)
-  mergeSparkProperties()
-  checkRequiredArguments()
-
-  /** Return default present in the currently defined defaults file. */
-  def getDefaultSparkProperties = {
+  /** Default properties present in the currently defined defaults file. */
+  lazy val defaultSparkProperties: HashMap[String, String] = {
     val defaultProperties = new HashMap[String, String]()
     if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
     Option(propertiesFile).foreach { filename =>
@@ -79,6 +75,10 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
     defaultProperties
   }

+  parseOpts(args.toList)
+  mergeSparkProperties()
+  checkRequiredArguments()
+
   /**
    * Fill in any undefined values based on the default properties file or options passed in through
    * the '--conf' flag.
@@ -107,7 +107,8 @@
       }
     }

-    val properties = getDefaultSparkProperties
+    val properties = HashMap[String, String]()
+    properties.putAll(defaultSparkProperties)
     properties.putAll(sparkProperties)

     // Use properties file as fallback for values which have a direct analog to
@@ -213,7 +214,7 @@
        | verbose $verbose
        |
        |Default properties from $propertiesFile:
-        |${getDefaultSparkProperties.mkString(" ", "\n ", "\n")}
+        |${defaultSparkProperties.mkString(" ", "\n ", "\n")}
      """.stripMargin
   }
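
The order of the two merges in mergeSparkProperties is what lets explicit --conf settings win: the defaults-file entries are copied in first, and any key they share with sparkProperties is then overwritten. A small sketch of that ordering, using ++= as the plain-Scala equivalent of the putAll calls (made-up values, not code from this commit):

import scala.collection.mutable.HashMap

object MergeOrderDemo extends App {
  val defaultSparkProperties = HashMap("spark.executor.memory" -> "1g")   // from the defaults file
  val sparkProperties = HashMap("spark.executor.memory" -> "4g")          // from a --conf flag

  val properties = HashMap[String, String]()
  properties ++= defaultSparkProperties   // defaults go in first
  properties ++= sparkProperties          // --conf entries overwrite shared keys

  println(properties("spark.executor.memory"))   // prints 4g
}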
