@@ -128,16 +128,19 @@ private[spark] class DiskBlockObjectWriter(
    */
   private def closeResources(): Unit = {
     if (initialized) {
-      mcs.manualClose()
-      channel = null
-      mcs = null
-      bs = null
-      fos = null
-      ts = null
-      objOut = null
-      initialized = false
-      streamOpen = false
-      hasBeenClosed = true
+      Utils.tryWithSafeFinally {
+        mcs.manualClose()
+      } {
+        channel = null
+        mcs = null
+        bs = null
+        fos = null
+        ts = null
+        objOut = null
+        initialized = false
+        streamOpen = false
+        hasBeenClosed = true
+      }
     }
   }
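Note on the hunk above: Utils.tryWithSafeFinally runs its second block even when the first block throws, so the writer's fields are reset even if mcs.manualClose() fails, and a failure during that cleanup is attached as a suppressed exception rather than replacing the original one. A minimal, self-contained sketch of the pattern follows; the object and helper below are illustrative stand-ins, not Spark's actual Utils implementation.

// Illustrative sketch only: a standalone equivalent of the try-with-safe-finally
// pattern used above. Spark's real helper is Utils.tryWithSafeFinally in
// org.apache.spark.util.Utils; the names here are made up.
object SafeFinallySketch {
  def tryWithSafeFinally[T](block: => T)(finallyBlock: => Unit): T = {
    var original: Throwable = null
    try {
      block
    } catch {
      case t: Throwable =>
        original = t
        throw t
    } finally {
      try {
        finallyBlock
      } catch {
        // If the main block already failed, keep that failure and attach the
        // cleanup failure as suppressed instead of letting it replace it.
        case t: Throwable if original != null => original.addSuppressed(t)
      }
    }
  }

  def main(args: Array[String]): Unit = {
    try {
      tryWithSafeFinally { sys.error("manualClose failed") } { println("state reset anyway") }
    } catch {
      case e: RuntimeException => println(s"original error propagated: ${e.getMessage}")
    }
  }
}

If the cleanup block throws while the main block succeeded, the guard in the sketch does not match and the cleanup exception simply propagates.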

@@ -199,26 +202,29 @@ private[spark] class DiskBlockObjectWriter(
   def revertPartialWritesAndClose(): File = {
     // Discard current writes. We do this by flushing the outstanding writes and then
     // truncating the file to its initial position.
-    try {
+    Utils.tryWithSafeFinally {
       if (initialized) {
         writeMetrics.decBytesWritten(reportedPosition - committedPosition)
         writeMetrics.decRecordsWritten(numRecordsWritten)
         streamOpen = false
         closeResources()
       }
-
-      val truncateStream = new FileOutputStream(file, true)
+    } {
+      var truncateStream: FileOutputStream = null
       try {
+        truncateStream = new FileOutputStream(file, true)
         truncateStream.getChannel.truncate(committedPosition)
-        file
+      } catch {
+        case e: Exception =>
+          logError("Uncaught exception while reverting partial writes to file " + file, e)
       } finally {
-        truncateStream.close()
+        if (truncateStream != null) {
+          truncateStream.close()
+          truncateStream = null
+        }
       }
       }
-    } catch {
-      case e: Exception =>
-        logError("Uncaught exception while reverting partial writes to file " + file, e)
-        file
     }
+    file
   }
 
   /**
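The second hunk applies the same helper to revertPartialWritesAndClose(): the metrics rollback and the closeResources() call run in the first block, and the truncate of the file back to committedPosition runs in the second, so the truncate still happens even if that cleanup throws. The truncate stream itself is now opened inside its own try, any failure there is logged rather than propagated, and the stream is closed only when it was actually opened, before the method returns file. A compilable sketch of that guarded truncate step; the names TruncateSketch and truncateTo are illustrative, not Spark API, and only java.io is assumed.

import java.io.{File, FileOutputStream}

object TruncateSketch {
  // Truncate `file` back to `position`, mirroring the cleanup block in the diff:
  // the stream is closed only if the constructor succeeded, and any failure while
  // opening or truncating is logged and swallowed so the caller can carry on.
  def truncateTo(file: File, position: Long): Unit = {
    var out: FileOutputStream = null
    try {
      out = new FileOutputStream(file, true) // append mode keeps the existing bytes
      out.getChannel.truncate(position)      // drop everything written past `position`
    } catch {
      case e: Exception =>
        Console.err.println(s"Failed to truncate $file to $position: ${e.getMessage}")
    } finally {
      if (out != null) {
        out.close()
      }
    }
  }
}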