@@ -131,7 +131,7 @@ private[spark] class EventLoggingListener(

EventLoggingListener.initEventLog(bstream, testing, loggedEvents)
fileSystem.setPermission(path, LOG_FILE_PERMISSIONS)
-writer = Some(new PrintWriter(bstream))
+writer = Some(new PrintWriter(new OutputStreamWriter(bstream, StandardCharsets.UTF_8)))
logInfo("Logging events to %s".format(logPath))
} catch {
case e: Exception =>
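
Not part of the patch, but a minimal sketch of why the wrapping matters: `new PrintWriter(out)` encodes text with the JVM default charset (`file.encoding`), whereas an explicit `OutputStreamWriter` pins the event log to UTF-8. The byte-array streams and the non-ASCII sample string below are illustrative.

```scala
import java.io.{ByteArrayOutputStream, OutputStreamWriter, PrintWriter}
import java.nio.charset.StandardCharsets

// PrintWriter over a raw OutputStream uses the platform default charset,
// so the bytes written for non-ASCII characters vary per JVM/locale.
val defaultOut = new ByteArrayOutputStream()
val defaultWriter = new PrintWriter(defaultOut)
defaultWriter.print("événement")
defaultWriter.flush()

// Wrapping the stream first pins the encoding to UTF-8 regardless of locale.
val utf8Out = new ByteArrayOutputStream()
val utf8Writer = new PrintWriter(new OutputStreamWriter(utf8Out, StandardCharsets.UTF_8))
utf8Writer.print("événement")
utf8Writer.flush()

// On a JVM whose default charset is not UTF-8, the two byte arrays differ.
println(java.util.Arrays.equals(defaultOut.toByteArray, utf8Out.toByteArray))
```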
@@ -19,7 +19,7 @@ package org.apache.spark.scheduler

import java.io.{EOFException, InputStream, IOException}

-import scala.io.Source
+import scala.io.{Codec, Source}

import com.fasterxml.jackson.core.JsonParseException
import com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException
@@ -54,7 +54,7 @@ private[spark] class ReplayListenerBus extends SparkListenerBus with Logging {
sourceName: String,
maybeTruncated: Boolean = false,
eventsFilter: ReplayEventsFilter = SELECT_ALL_FILTER): Unit = {
-val lines = Source.fromInputStream(logData).getLines()
+val lines = Source.fromInputStream(logData)(Codec.UTF8).getLines()
replay(lines, sourceName, maybeTruncated, eventsFilter)
}
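
Not part of the patch, but a minimal sketch of the reader-side counterpart: `Source.fromInputStream` takes an implicit `Codec` that falls back to the platform default, so passing `Codec.UTF8` explicitly keeps replay independent of the history server's locale. The sample JSON line is illustrative.

```scala
import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets

import scala.io.{Codec, Source}

// Event log bytes are always UTF-8 on the writer side now...
val json = """{"Event":"SparkListenerApplicationStart","App Name":"événement"}"""
val bytes = json.getBytes(StandardCharsets.UTF_8)

// ...so the replay side decodes them with an explicit Codec.UTF8 instead of
// whatever Codec.default resolves to on this JVM.
val lines = Source.fromInputStream(new ByteArrayInputStream(bytes))(Codec.UTF8).getLines()
assert(lines.next().contains("événement"))
```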

@@ -19,6 +19,7 @@ package org.apache.spark.scheduler

import java.io._
import java.net.URI
+import java.nio.charset.StandardCharsets
import java.util.concurrent.atomic.AtomicInteger

import scala.collection.mutable.ArrayBuffer
@@ -52,10 +53,11 @@ class ReplayListenerSuite extends SparkFunSuite with BeforeAndAfter with LocalSp
test("Simple replay") {
val logFilePath = getFilePath(testDir, "events.txt")
val fstream = fileSystem.create(logFilePath)
+val fwriter = new OutputStreamWriter(fstream, StandardCharsets.UTF_8)
val applicationStart = SparkListenerApplicationStart("Greatest App (N)ever", None,
125L, "Mickey", None)
val applicationEnd = SparkListenerApplicationEnd(1000L)
-Utils.tryWithResource(new PrintWriter(fstream)) { writer =>
+Utils.tryWithResource(new PrintWriter(fwriter)) { writer =>
// scalastyle:off println
writer.println(compact(render(JsonProtocol.sparkEventToJson(applicationStart))))
writer.println(compact(render(JsonProtocol.sparkEventToJson(applicationEnd))))
@@ -88,7 +90,8 @@ class ReplayListenerSuite extends SparkFunSuite with BeforeAndAfter with LocalSp
val buffered = new ByteArrayOutputStream
val codec = new LZ4CompressionCodec(new SparkConf())
val compstream = codec.compressedOutputStream(buffered)
-Utils.tryWithResource(new PrintWriter(compstream)) { writer =>
+val cwriter = new OutputStreamWriter(compstream, StandardCharsets.UTF_8)
+Utils.tryWithResource(new PrintWriter(cwriter)) { writer =>

val applicationStart = SparkListenerApplicationStart("AppStarts", None,
125L, "Mickey", None)
@@ -134,10 +137,11 @@ class ReplayListenerSuite extends SparkFunSuite with BeforeAndAfter with LocalSp
test("Replay incompatible event log") {
val logFilePath = getFilePath(testDir, "incompatible.txt")
val fstream = fileSystem.create(logFilePath)
+val fwriter = new OutputStreamWriter(fstream, StandardCharsets.UTF_8)
val applicationStart = SparkListenerApplicationStart("Incompatible App", None,
125L, "UserUsingIncompatibleVersion", None)
val applicationEnd = SparkListenerApplicationEnd(1000L)
-Utils.tryWithResource(new PrintWriter(fstream)) { writer =>
+Utils.tryWithResource(new PrintWriter(fwriter)) { writer =>
// scalastyle:off println
writer.println(compact(render(JsonProtocol.sparkEventToJson(applicationStart))))
writer.println("""{"Event":"UnrecognizedEventOnlyForTest","Timestamp":1477593059313}""")
1 change: 1 addition & 0 deletions docs/core-migration-guide.md
@@ -30,3 +30,4 @@ license: |

- In Spark 3.0, the deprecated method `AccumulableInfo.apply` has been removed because creating `AccumulableInfo` is disallowed.

- In Spark 3.0, event log files are written in UTF-8 encoding, and the Spark History Server replays event log files as UTF-8. Previously, Spark wrote event log files using the default charset of the driver JVM process, so a Spark 2.x History Server is needed to read old event log files written in an incompatible encoding.
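
A minimal sketch (not from the patch) of the incompatibility this note describes: bytes written under a non-UTF-8 driver charset do not round-trip when decoded as UTF-8, which is why such logs still need a Spark 2.x History Server. The Shift_JIS charset and the sample application name are illustrative assumptions.

```scala
import java.nio.charset.{Charset, StandardCharsets}

// A hypothetical event-log line containing a non-ASCII application name.
val line = """{"Event":"SparkListenerApplicationStart","App Name":"データ処理"}"""

// A Spark 2.x driver whose default charset was not UTF-8 (say Shift_JIS)
// would have written these bytes.
val legacyBytes = line.getBytes(Charset.forName("Shift_JIS"))

// A Spark 3.0 History Server decodes event logs as UTF-8, so the old bytes
// no longer round-trip; the app name comes back garbled.
val decoded = new String(legacyBytes, StandardCharsets.UTF_8)
println(decoded == line)  // false for this input
```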