Skip to content

Commit e471d8e

Browse files
committed
rework
1 parent d8b6620 commit e471d8e

File tree

2 files changed

+12
-10
lines changed

2 files changed

+12
-10
lines changed

core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -52,8 +52,6 @@ private[spark] class EventLoggingListener(
5252
private val logBaseDir = conf.get("spark.eventLog.dir", "/tmp/spark-events").stripSuffix("/")
5353
private val name = appName.replaceAll("[ :/]", "-").toLowerCase + "-" + System.currentTimeMillis
5454
val logDir = logBaseDir + "/" + name
55-
val LOG_FILE_PERMISSIONS: FsPermission =
56-
FsPermission.createImmutable(Integer.parseInt("770", 8).toShort: Short)
5755

5856
private val logger =
5957
new FileLogger(logDir, conf, hadoopConfiguration, outputBufferSize, shouldCompress,
@@ -67,11 +65,10 @@ private[spark] class EventLoggingListener(
6765
logInfo("Logging events to %s".format(logDir))
6866
if (shouldCompress) {
6967
val codec = conf.get("spark.io.compression.codec", CompressionCodec.DEFAULT_COMPRESSION_CODEC)
70-
logger.newFile(COMPRESSION_CODEC_PREFIX + codec, Some(LOG_FILE_PERMISSIONS))
68+
logger.newFile(COMPRESSION_CODEC_PREFIX + codec)
7169
}
72-
logger.newFile(SPARK_VERSION_PREFIX + SparkContext.SPARK_VERSION,
73-
Some(LOG_FILE_PERMISSIONS))
74-
logger.newFile(LOG_PREFIX + logger.fileIndex, Some(LOG_FILE_PERMISSIONS))
70+
logger.newFile(SPARK_VERSION_PREFIX + SparkContext.SPARK_VERSION)
71+
logger.newFile(LOG_PREFIX + logger.fileIndex)
7572
}
7673

7774
/** Log the event as JSON. */
@@ -118,7 +115,7 @@ private[spark] class EventLoggingListener(
118115
* In addition, create an empty special file to indicate application completion.
119116
*/
120117
def stop() = {
121-
logger.newFile(APPLICATION_COMPLETE, Some(LOG_FILE_PERMISSIONS))
118+
logger.newFile(APPLICATION_COMPLETE)
122119
logger.stop()
123120
}
124121
}
@@ -128,6 +125,9 @@ private[spark] object EventLoggingListener extends Logging {
128125
val SPARK_VERSION_PREFIX = "SPARK_VERSION_"
129126
val COMPRESSION_CODEC_PREFIX = "COMPRESSION_CODEC_"
130127
val APPLICATION_COMPLETE = "APPLICATION_COMPLETE"
128+
val LOG_FILE_PERMISSIONS: FsPermission =
129+
FsPermission.createImmutable(Integer.parseInt("770", 8).toShort)
130+
131131

132132
// A cache for compression codecs to avoid creating the same codec many times
133133
private val codecMap = new mutable.HashMap[String, CompressionCodec]

core/src/main/scala/org/apache/spark/util/FileLogger.scala

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -83,14 +83,16 @@ private[spark] class FileLogger(
8383
}
8484
if (dirPermissions.isDefined) {
8585
val fsStatus = fileSystem.getFileStatus(path)
86-
if (fsStatus.getPermission().toShort() != dirPermissions.get.toShort()) {
87-
fileSystem.setPermission(path, dirPermissions.get);
86+
if (fsStatus.getPermission().toShort() != dirPermissions.get.toShort) {
87+
fileSystem.setPermission(path, dirPermissions.get)
8888
}
8989
}
9090
}
9191

9292
/**
9393
* Create a new writer for the file identified by the given path.
94+
* If the permissions are not passed in, it will default to using the permissions
95
* (dirPermissions) specified when the class was instantiated.
9496
*/
9597
private def createWriter(fileName: String, perms: Option[FsPermission] = None): PrintWriter = {
9698
val logPath = logDir + "/" + fileName
@@ -110,7 +112,7 @@ private[spark] class FileLogger(
110112
hadoopDataStream.get
111113
}
112114

113-
perms.foreach {p => fileSystem.setPermission(path, p)}
115+
perms.orElse(dirPermissions).foreach {p => fileSystem.setPermission(path, p)}
114116
val bstream = new BufferedOutputStream(dstream, outputBufferSize)
115117
val cstream = if (compress) compressionCodec.compressedOutputStream(bstream) else bstream
116118
new PrintWriter(cstream)

0 commit comments

Comments (0)