
Commit 1fd6ed9

pwendell authored and rxin committed
[SPARK-7204] [SQL] Fix callSite for Dataframe and SQL operations
This patch adds SQL to the set of excluded libraries when generating a callSite. This makes the callSite mechanism work properly for the DataFrame API. I also added a small improvement for JDBC queries, where we just use the string "Spark JDBC Server Query" instead of trying to give a call site that doesn't make any sense to the user.

Before (DF):
![screen shot 2015-04-28 at 1 29 26 pm](https://cloud.githubusercontent.com/assets/320616/7380170/ef63bfb0-edae-11e4-989c-f88a5ba6bbee.png)

After (DF):
![screen shot 2015-04-28 at 1 34 58 pm](https://cloud.githubusercontent.com/assets/320616/7380181/fa7f6d90-edae-11e4-9559-26f163ed63b8.png)

After (JDBC):
![screen shot 2015-04-28 at 2 00 10 pm](https://cloud.githubusercontent.com/assets/320616/7380185/02f5b2a4-edaf-11e4-8e5b-99bdc3df66dd.png)

Author: Patrick Wendell <[email protected]>

Closes #5757 from pwendell/dataframes and squashes the following commits:

0d931a4 [Patrick Wendell] Attempting to fix PySpark tests
85bf740 [Patrick Wendell] [SPARK-7204] Fix callsite for dataframe operations.
1 parent fe917f5
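For context on the mechanism this commit touches: Spark computes a call site by walking the current thread's stack trace, skipping every frame whose class name matches an exclusion predicate, and reporting the first remaining frame as user code. A minimal, self-contained sketch of that idea (not Spark's actual implementation; CallSiteSketch, Framework, and UserApp are invented names):

// Minimal sketch of the mechanism (not Spark's code): walk the stack, skip
// frames whose class the predicate matches, and report the first remaining
// frame as the call site.
object CallSiteSketch {
  // Stand-in for Utils.getCallSite; `skipClass` plays the role of the
  // exclusion function this patch renames and extends.
  def getCallSite(skipClass: String => Boolean): String =
    Thread.currentThread.getStackTrace
      .drop(1) // drop the getStackTrace frame itself
      .find(el => !skipClass(el.getClassName))
      .map(el => s"${el.getMethodName} at ${el.getFileName}:${el.getLineNumber}")
      .getOrElse("<unknown>")
}

object Framework {
  // Fake "internal API" layer, analogous to org.apache.spark.sql.*.
  def runQuery(): String =
    CallSiteSketch.getCallSite(name =>
      name.startsWith("CallSiteSketch") || name.startsWith("Framework") ||
        name.startsWith("scala") || name.startsWith("java."))
}

object UserApp {
  def main(args: Array[String]): Unit =
    // Prints the user's own frame (e.g. "main at UserApp.scala:NN") rather
    // than a frame inside Framework, which is what SPARK-7204 fixes for SQL.
    println(Framework.runQuery())
}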

File tree

2 files changed: +21 additions, -10 deletions


core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 19 additions & 9 deletions
@@ -1299,16 +1299,18 @@ private[spark] object Utils extends Logging {
   }
 
   /** Default filtering function for finding call sites using `getCallSite`. */
-  private def coreExclusionFunction(className: String): Boolean = {
-    // A regular expression to match classes of the "core" Spark API that we want to skip when
-    // finding the call site of a method.
+  private def sparkInternalExclusionFunction(className: String): Boolean = {
+    // A regular expression to match classes of the internal Spark API's
+    // that we want to skip when finding the call site of a method.
     val SPARK_CORE_CLASS_REGEX =
       """^org\.apache\.spark(\.api\.java)?(\.util)?(\.rdd)?(\.broadcast)?\.[A-Z]""".r
+    val SPARK_SQL_CLASS_REGEX = """^org\.apache\.spark\.sql.*""".r
     val SCALA_CORE_CLASS_PREFIX = "scala"
-    val isSparkCoreClass = SPARK_CORE_CLASS_REGEX.findFirstIn(className).isDefined
+    val isSparkClass = SPARK_CORE_CLASS_REGEX.findFirstIn(className).isDefined ||
+      SPARK_SQL_CLASS_REGEX.findFirstIn(className).isDefined
     val isScalaClass = className.startsWith(SCALA_CORE_CLASS_PREFIX)
     // If the class is a Spark internal class or a Scala class, then exclude.
-    isSparkCoreClass || isScalaClass
+    isSparkClass || isScalaClass
   }
 
   /**
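To see what the widened exclusion now covers, here is a quick standalone check; the two regular expressions are copied from the patch, while the surrounding object and the sample class names are illustrative:

// The two regexes are copied from the patch; everything else is scaffolding.
object ExclusionCheck extends App {
  val SPARK_CORE_CLASS_REGEX =
    """^org\.apache\.spark(\.api\.java)?(\.util)?(\.rdd)?(\.broadcast)?\.[A-Z]""".r
  val SPARK_SQL_CLASS_REGEX = """^org\.apache\.spark\.sql.*""".r

  def excluded(className: String): Boolean =
    SPARK_CORE_CLASS_REGEX.findFirstIn(className).isDefined ||
      SPARK_SQL_CLASS_REGEX.findFirstIn(className).isDefined ||
      className.startsWith("scala")

  println(excluded("org.apache.spark.rdd.RDD"))       // true: excluded before and after
  println(excluded("org.apache.spark.sql.DataFrame")) // true: newly excluded by this patch
  println(excluded("com.example.MyJob"))              // false: reported as user code
}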
@@ -1318,7 +1320,7 @@ private[spark] object Utils extends Logging {
    *
    * @param skipClass Function that is used to exclude non-user-code classes.
    */
-  def getCallSite(skipClass: String => Boolean = coreExclusionFunction): CallSite = {
+  def getCallSite(skipClass: String => Boolean = sparkInternalExclusionFunction): CallSite = {
     // Keep crawling up the stack trace until we find the first function not inside of the spark
     // package. We track the last (shallowest) contiguous Spark method. This might be an RDD
     // transformation, a SparkContext function (such as parallelize), or anything else that leads
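Because skipClass is a plain String => Boolean, internal callers can widen the exclusion further on a per-call basis. A hypothetical sketch (Utils is private[spark], so this only compiles inside Spark's own packages, and the com.mycompany.wrappers prefix is invented for illustration):

import org.apache.spark.util.{CallSite, Utils}

// Hypothetical internal caller that also skips an extra wrapper layer.
val site: CallSite = Utils.getCallSite(skipClass = name =>
  name.startsWith("org.apache.spark") ||
    name.startsWith("scala") ||
    name.startsWith("com.mycompany.wrappers")) // invented prefix, for illustration

println(site.shortForm) // e.g. "someMethod at MyApp.scala:42"
println(site.longForm)  // up to spark.callstack.depth (default 20) stack frames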
@@ -1357,9 +1359,17 @@ private[spark] object Utils extends Logging {
     }
 
     val callStackDepth = System.getProperty("spark.callstack.depth", "20").toInt
-    CallSite(
-      shortForm = s"$lastSparkMethod at $firstUserFile:$firstUserLine",
-      longForm = callStack.take(callStackDepth).mkString("\n"))
+    val shortForm =
+      if (firstUserFile == "HiveSessionImpl.java") {
+        // To be more user friendly, show a nicer string for queries submitted from the JDBC
+        // server.
+        "Spark JDBC Server Query"
+      } else {
+        s"$lastSparkMethod at $firstUserFile:$firstUserLine"
+      }
+    val longForm = callStack.take(callStackDepth).mkString("\n")
+
+    CallSite(shortForm, longForm)
   }
 
   /** Return a string containing part of a file from byte 'start' to 'end'. */
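The short form is the string users see in the web UI and in RDD.toDebugString. A small way to observe it, assuming a spark-shell session with the usual sc SparkContext:

// Each RDD records its creation site. Before this patch, DataFrame operations
// reported a frame inside org.apache.spark.sql.* here, and JDBC-server
// queries reported a meaningless HiveSessionImpl frame.
val doubled = sc.parallelize(1 to 10).map(_ * 2)
println(doubled.toDebugString)
// (2) MapPartitionsRDD[1] at map at <console>:NN ...  <- the call site string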

python/pyspark/sql/dataframe.py

Lines changed: 2 additions & 1 deletion
@@ -237,7 +237,8 @@ def explain(self, extended=False):
         :param extended: boolean, default ``False``. If ``False``, prints only the physical plan.
 
         >>> df.explain()
-        PhysicalRDD [age#0,name#1], MapPartitionsRDD[...] at mapPartitions at SQLContext.scala:...
+        PhysicalRDD [age#0,name#1], MapPartitionsRDD[...] at applySchemaToPythonRDD at\
+        NativeMethodAccessorImpl.java:...
 
         >>> df.explain(True)
         == Parsed Logical Plan ==
