File tree (1 file changed: +8 −8 lines changed)
core/src/main/scala/org/apache/spark/internal/config (1 file changed: +8 −8 lines changed)
@@ -1095,14 +1095,6 @@ package object config {
1095 1095        .booleanConf
1096 1096        .createWithDefault(false)
1097 1097
1098      -  private[spark] val SHUFFLE_HOST_LOCAL_DISK_READING_ENABLED =
1099      -    ConfigBuilder("spark.shuffle.readHostLocalDisk.enabled")
1100      -      .doc(s"If enabled (and `${SHUFFLE_USE_OLD_FETCH_PROTOCOL.key}` is disabled), shuffle " +
1101      -        "blocks requested from those block managers which are running on the same host are read " +
1102      -        "from the disk directly instead of being fetched as remote blocks over the network.")
1103      -      .booleanConf
1104      -      .createWithDefault(true)
1105      -
1106 1098    private[spark] val STORAGE_LOCAL_DISK_BY_EXECUTORS_CACHE_SIZE =
1107 1099      ConfigBuilder("spark.storage.localDiskByExecutors.cacheSize")
1108 1100        .doc("The max number of executors for which the local dirs are stored. This size is " +
@@ -1148,6 +1140,14 @@ package object config {
1148 1140        .booleanConf
1149 1141        .createWithDefault(false)
1150 1142
     1143  +  private[spark] val SHUFFLE_HOST_LOCAL_DISK_READING_ENABLED =
     1144  +    ConfigBuilder("spark.shuffle.readHostLocalDisk.enabled")
     1145  +      .doc(s"If enabled (and `${SHUFFLE_USE_OLD_FETCH_PROTOCOL.key}` is disabled), shuffle " +
     1146  +        "blocks requested from those block managers which are running on the same host are read " +
     1147  +        "from the disk directly instead of being fetched as remote blocks over the network.")
     1148  +      .booleanConf
     1149  +      .createWithDefault(true)
     1150  +
11511151 private [spark] val MEMORY_MAP_LIMIT_FOR_TESTS =
11521152 ConfigBuilder (" spark.storage.memoryMapLimitForTests" )
11531153 .internal()
You can’t perform that action at this time.
0 commit comments