2 files changed under core/src/main/scala/org/apache/spark: +6, -2 lines

@@ -224,7 +224,6 @@ class SparkContext(config: SparkConf) extends Logging {
 
   /** A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse. */
   val hadoopConfiguration: Configuration = {
-    val env = SparkEnv.get
     val hadoopConf = SparkHadoopUtil.get.newConfiguration()
     // Explicitly check for S3 environment variables
     if (System.getenv("AWS_ACCESS_KEY_ID") != null &&
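For context, this hunk only removes an unused `val env = SparkEnv.get`; the rest of the block builds a Hadoop `Configuration` and copies S3 credentials from the environment into it. A minimal, hedged sketch of that pattern follows; the `fs.s3.*` property names and the helper object are illustrative assumptions and do not appear in the hunk.

```scala
import org.apache.hadoop.conf.Configuration

// Illustrative sketch only: propagate AWS credentials from environment
// variables into a Hadoop Configuration, as the surrounding block in
// SparkContext does for S3 access. The property names are assumptions.
object S3ConfSketch {
  def withS3Credentials(hadoopConf: Configuration): Configuration = {
    val accessKey = System.getenv("AWS_ACCESS_KEY_ID")
    val secretKey = System.getenv("AWS_SECRET_ACCESS_KEY")
    if (accessKey != null && secretKey != null) {
      hadoopConf.set("fs.s3.awsAccessKeyId", accessKey)
      hadoopConf.set("fs.s3.awsSecretAccessKey", secretKey)
    }
    hadoopConf
  }
}
```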
@@ -91,8 +91,13 @@ private[spark] object MetadataCleaner {
     conf.set(MetadataCleanerType.systemProperty(cleanerType), delay.toString)
   }
 
+  /**
+   * Set the default delay time (in seconds).
+   * @param conf SparkConf instance
+   * @param delay default delay time to set
+   * @param resetAll whether to reset all to default
+   */
   def setDelaySeconds(conf: SparkConf, delay: Int, resetAll: Boolean = true) {
-    // override for all ?
     conf.set("spark.cleaner.ttl", delay.toString)
     if (resetAll) {
       for (cleanerType <- MetadataCleanerType.values) {
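The added scaladoc documents `setDelaySeconds`. A hedged usage sketch follows, assuming the object lives at `org.apache.spark.util.MetadataCleaner` (the full path is not shown in the hunk); since the object is `private[spark]`, the call compiles only from code inside the `org.apache.spark` package.

```scala
package org.apache.spark

import org.apache.spark.util.MetadataCleaner

// Hedged example: set a 3600-second default TTL for metadata cleaning.
// With resetAll = true (the default), every MetadataCleanerType's
// per-cleaner delay is reset to this value as well.
object CleanerTtlExample {
  def configure(): SparkConf = {
    val conf = new SparkConf()
    MetadataCleaner.setDelaySeconds(conf, delay = 3600, resetAll = true)
    conf
  }
}
```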