Skip to content

Commit e43c72f

Browse files
rxin authored and aarondav committed
Add more debug message for ManagedBuffer
This is to help debug the error reported at
http://apache-spark-user-list.1001560.n3.nabble.com/SQL-queries-fail-in-1-2-0-SNAPSHOT-td15327.html

Author: Reynold Xin <[email protected]>

Closes apache#2580 from rxin/buffer-debug and squashes the following commits:

5814292 [Reynold Xin] Logging close() in case close() fails.
323dfec [Reynold Xin] Add more debug message.
1 parent dab1b0a commit e43c72f

File tree

2 files changed

+51
-6
lines changed

2 files changed

+51
-6
lines changed

core/src/main/scala/org/apache/spark/network/ManagedBuffer.scala

Lines changed: 37 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -17,15 +17,17 @@
1717

1818
package org.apache.spark.network
1919

20-
import java.io.{FileInputStream, RandomAccessFile, File, InputStream}
20+
import java.io._
2121
import java.nio.ByteBuffer
2222
import java.nio.channels.FileChannel
2323
import java.nio.channels.FileChannel.MapMode
2424

25+
import scala.util.Try
26+
2527
import com.google.common.io.ByteStreams
2628
import io.netty.buffer.{ByteBufInputStream, ByteBuf}
2729

28-
import org.apache.spark.util.ByteBufferInputStream
30+
import org.apache.spark.util.{ByteBufferInputStream, Utils}
2931

3032

3133
/**
@@ -71,18 +73,47 @@ final class FileSegmentManagedBuffer(val file: File, val offset: Long, val lengt
7173
try {
7274
channel = new RandomAccessFile(file, "r").getChannel
7375
channel.map(MapMode.READ_ONLY, offset, length)
76+
} catch {
77+
case e: IOException =>
78+
Try(channel.size).toOption match {
79+
case Some(fileLen) =>
80+
throw new IOException(s"Error in reading $this (actual file length $fileLen)", e)
81+
case None =>
82+
throw new IOException(s"Error in opening $this", e)
83+
}
7484
} finally {
7585
if (channel != null) {
76-
channel.close()
86+
Utils.tryLog(channel.close())
7787
}
7888
}
7989
}
8090

8191
override def inputStream(): InputStream = {
82-
val is = new FileInputStream(file)
83-
is.skip(offset)
84-
ByteStreams.limit(is, length)
92+
var is: FileInputStream = null
93+
try {
94+
is = new FileInputStream(file)
95+
is.skip(offset)
96+
ByteStreams.limit(is, length)
97+
} catch {
98+
case e: IOException =>
99+
if (is != null) {
100+
Utils.tryLog(is.close())
101+
}
102+
Try(file.length).toOption match {
103+
case Some(fileLen) =>
104+
throw new IOException(s"Error in reading $this (actual file length $fileLen)", e)
105+
case None =>
106+
throw new IOException(s"Error in opening $this", e)
107+
}
108+
case e: Throwable =>
109+
if (is != null) {
110+
Utils.tryLog(is.close())
111+
}
112+
throw e
113+
}
85114
}
115+
116+
override def toString: String = s"${getClass.getName}($file, $offset, $length)"
86117
}
87118

88119

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1304,6 +1304,20 @@ private[spark] object Utils extends Logging {
13041304
}
13051305
}
13061306

1307+
/**
 * Evaluates the by-name block `f` and materializes its outcome as a [[Try]].
 *
 * Unlike `Try(f)`, fatal throwables are also captured (after being logged),
 * so a failing cleanup action cannot mask an earlier error. Control-flow
 * throwables (e.g. non-local returns) are always rethrown untouched.
 *
 * @param f the computation to run
 * @return `Success` with the result, or `Failure` wrapping the logged throwable
 */
def tryLog[T](f: => T): Try[T] = {
  try {
    scala.util.Success(f)
  } catch {
    case ct: ControlThrowable =>
      // Never swallow control-flow exceptions — rethrow immediately.
      throw ct
    case t: Throwable =>
      logError(s"Uncaught exception in thread ${Thread.currentThread().getName}", t)
      scala.util.Failure(t)
  }
}
1320+
13071321
/** Returns true if the given exception was fatal. See docs for scala.util.control.NonFatal. */
13081322
def isFatalError(e: Throwable): Boolean = {
13091323
e match {

0 commit comments

Comments
 (0)