Skip to content

Commit 9787fff

Browse files
committed
Merge remote-tracking branch 'upstream/master' into SPARK-2177
2 parents 440c5af + 67fca18 commit 9787fff

File tree

2 files changed

+6
-2
lines changed

2 files changed

+6
-2
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -224,7 +224,6 @@ class SparkContext(config: SparkConf) extends Logging {
224224

225225
/** A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse. */
226226
val hadoopConfiguration: Configuration = {
227-
val env = SparkEnv.get
228227
val hadoopConf = SparkHadoopUtil.get.newConfiguration()
229228
// Explicitly check for S3 environment variables
230229
if (System.getenv("AWS_ACCESS_KEY_ID") != null &&

core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala

Lines changed: 6 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -91,8 +91,13 @@ private[spark] object MetadataCleaner {
9191
conf.set(MetadataCleanerType.systemProperty(cleanerType), delay.toString)
9292
}
9393

94+
/**
95+
* Set the default delay time (in seconds).
96+
* @param conf SparkConf instance
97+
* @param delay default delay time to set
98+
* @param resetAll whether to reset all to default
99+
*/
94100
def setDelaySeconds(conf: SparkConf, delay: Int, resetAll: Boolean = true) {
95-
// override for all ?
96101
conf.set("spark.cleaner.ttl", delay.toString)
97102
if (resetAll) {
98103
for (cleanerType <- MetadataCleanerType.values) {

0 commit comments

Comments (0)