
Commit fc45476

Author: wangfei (committed)
validate memoryfraction in sparkconf
1 parent 2e79b3d commit fc45476

File tree

3 files changed: +21 −9 lines


core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 21 additions & 0 deletions

@@ -236,6 +236,27 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
       }
     }
 
+    // Validate memoryFraction
+    val storageMemFraction = getDouble("spark.storage.memoryFraction", 0.6)
+    val shuffleMemFraction = getDouble("spark.shuffle.memoryFraction", 0.3)
+    val shuffleSafFraction = getDouble("spark.shuffle.safetyFraction", 0.8)
+
+    if (storageMemFraction > 1 || storageMemFraction < 0) {
+      val msg = s"spark.storage.memoryFraction should be between 0 and 1 " +
+        s"(was '$storageMemFraction')."
+      throw new IllegalArgumentException(msg)
+    }
+    if (shuffleMemFraction > 1 || shuffleMemFraction < 0) {
+      val msg = s"spark.shuffle.memoryFraction should be between 0 and 1 " +
+        s"(was '$shuffleMemFraction')."
+      throw new IllegalArgumentException(msg)
+    }
+    if (shuffleSafFraction > 1 || shuffleSafFraction < 0) {
+      val msg = s"spark.shuffle.safetyFraction should be between 0 and 1 " +
+        s"(was '$shuffleSafFraction')."
+      throw new IllegalArgumentException(msg)
+    }
+
     // Check for legacy configs
     sys.env.get("SPARK_JAVA_OPTS").foreach { value =>
       val error =
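
With this change the range checks live in one place: SparkConf validates all three fractions, and the call sites in the files below simply read them. A minimal usage sketch of the intended behaviour, assuming the new check runs when the conf is validated (for example when a SparkContext is created from it); the master and app-name values are placeholders:

import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical sketch: with the check moved into SparkConf, an out-of-range
// fraction should be rejected up front rather than at its first use in
// BlockManager or ExternalAppendOnlyMap.
val conf = new SparkConf()
  .setMaster("local")
  .setAppName("memoryFraction-check")
  .set("spark.storage.memoryFraction", "1.5")  // invalid: greater than 1

// Expected to throw IllegalArgumentException:
// "spark.storage.memoryFraction should be between 0 and 1 (was '1.5')."
val sc = new SparkContext(conf)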

core/src/main/scala/org/apache/spark/storage/BlockManager.scala

Lines changed: 0 additions & 3 deletions

@@ -1045,9 +1045,6 @@ private[spark] object BlockManager extends Logging {
 
   def getMaxMemory(conf: SparkConf): Long = {
     val memoryFraction = conf.getDouble("spark.storage.memoryFraction", 0.6)
-    if (memoryFraction > 1 || memoryFraction < 0) {
-      throw new IllegalArgumentException("spark.storage.memoryFraction should be between 0 and 1.")
-    }
     (Runtime.getRuntime.maxMemory * memoryFraction).toLong
   }
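
getMaxMemory now only computes the storage budget and relies on SparkConf having validated the fraction. As an illustrative worked example of the formula (the heap size below is assumed, not taken from the commit):

// Illustrative arithmetic only: an assumed 8 GiB heap with the default
// spark.storage.memoryFraction of 0.6 gives roughly 4.8 GiB for storage.
val maxHeapBytes: Long = 8L * 1024 * 1024 * 1024
val storageFraction = 0.6
val maxStorageBytes = (maxHeapBytes * storageFraction).toLong  // ≈ 4.8 GiB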

core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala

Lines changed: 0 additions & 6 deletions

@@ -76,12 +76,6 @@ class ExternalAppendOnlyMap[K, V, C](
   private val maxMemoryThreshold = {
     val memoryFraction = sparkConf.getDouble("spark.shuffle.memoryFraction", 0.3)
     val safetyFraction = sparkConf.getDouble("spark.shuffle.safetyFraction", 0.8)
-    if (memoryFraction > 1 || memoryFraction < 0) {
-      throw new IllegalArgumentException("spark.shuffle.memoryFraction should be between 0 and 1.")
-    }
-    if (safetyFraction > 1 || safetyFraction < 0) {
-      throw new IllegalArgumentException("spark.shuffle.safetyFraction should be between 0 and 1.")
-    }
     (Runtime.getRuntime.maxMemory * memoryFraction * safetyFraction).toLong
   }
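
The same pattern applies here: maxMemoryThreshold is just the product of the heap size, memoryFraction, and safetyFraction, with the range checks now done once in SparkConf. An illustrative calculation using the defaults shown in the diff and the same assumed 8 GiB heap:

// Illustrative arithmetic only: 8 GiB * 0.3 * 0.8 ≈ 1.9 GiB spill threshold.
val maxMemoryThreshold = (8L * 1024 * 1024 * 1024 * 0.3 * 0.8).toLong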
