Skip to content

Commit 32bcf9a

Browse files
yhuaimateiz
authored and committed
[SPARK-2683] unidoc failed because org.apache.spark.util.CallSite uses Java keywords as value names
Renaming `short` to `shortForm` and `long` to `longForm`. JIRA: https://issues.apache.org/jira/browse/SPARK-2683 Author: Yin Huai <[email protected]> Closes apache#1585 from yhuai/SPARK-2683 and squashes the following commits: 5ddb843 [Yin Huai] "short" and "long" are Java keywords. In order to generate javadoc, renaming "short" to "shortForm" and "long" to "longForm".
1 parent a2715cc commit 32bcf9a

File tree

6 files changed

+16
-14
lines changed

6 files changed

+16
-14
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1037,7 +1037,7 @@ class SparkContext(config: SparkConf) extends Logging {
10371037
*/
10381038
private[spark] def getCallSite(): CallSite = {
10391039
Option(getLocalProperty("externalCallSite")) match {
1040-
case Some(callSite) => CallSite(callSite, long = "")
1040+
case Some(callSite) => CallSite(callSite, longForm = "")
10411041
case None => Utils.getCallSite
10421042
}
10431043
}
@@ -1059,11 +1059,12 @@ class SparkContext(config: SparkConf) extends Logging {
10591059
}
10601060
val callSite = getCallSite
10611061
val cleanedFunc = clean(func)
1062-
logInfo("Starting job: " + callSite.short)
1062+
logInfo("Starting job: " + callSite.shortForm)
10631063
val start = System.nanoTime
10641064
dagScheduler.runJob(rdd, cleanedFunc, partitions, callSite, allowLocal,
10651065
resultHandler, localProperties.get)
1066-
logInfo("Job finished: " + callSite.short + ", took " + (System.nanoTime - start) / 1e9 + " s")
1066+
logInfo(
1067+
"Job finished: " + callSite.shortForm + ", took " + (System.nanoTime - start) / 1e9 + " s")
10671068
rdd.doCheckpoint()
10681069
}
10691070

@@ -1144,11 +1145,12 @@ class SparkContext(config: SparkConf) extends Logging {
11441145
evaluator: ApproximateEvaluator[U, R],
11451146
timeout: Long): PartialResult[R] = {
11461147
val callSite = getCallSite
1147-
logInfo("Starting job: " + callSite.short)
1148+
logInfo("Starting job: " + callSite.shortForm)
11481149
val start = System.nanoTime
11491150
val result = dagScheduler.runApproximateJob(rdd, func, evaluator, callSite, timeout,
11501151
localProperties.get)
1151-
logInfo("Job finished: " + callSite.short + ", took " + (System.nanoTime - start) / 1e9 + " s")
1152+
logInfo(
1153+
"Job finished: " + callSite.shortForm + ", took " + (System.nanoTime - start) / 1e9 + " s")
11521154
result
11531155
}
11541156

core/src/main/scala/org/apache/spark/rdd/RDD.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1225,7 +1225,7 @@ abstract class RDD[T: ClassTag](
12251225

12261226
/** User code that created this RDD (e.g. `textFile`, `parallelize`). */
12271227
@transient private[spark] val creationSite = Utils.getCallSite
1228-
private[spark] def getCreationSite: String = Option(creationSite).map(_.short).getOrElse("")
1228+
private[spark] def getCreationSite: String = Option(creationSite).map(_.shortForm).getOrElse("")
12291229

12301230
private[spark] def elementClassTag: ClassTag[T] = classTag[T]
12311231

core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -455,7 +455,7 @@ class DAGScheduler(
455455
waiter.awaitResult() match {
456456
case JobSucceeded => {}
457457
case JobFailed(exception: Exception) =>
458-
logInfo("Failed to run " + callSite.short)
458+
logInfo("Failed to run " + callSite.shortForm)
459459
throw exception
460460
}
461461
}
@@ -679,7 +679,7 @@ class DAGScheduler(
679679
val job = new ActiveJob(jobId, finalStage, func, partitions, callSite, listener, properties)
680680
clearCacheLocs()
681681
logInfo("Got job %s (%s) with %d output partitions (allowLocal=%s)".format(
682-
job.jobId, callSite.short, partitions.length, allowLocal))
682+
job.jobId, callSite.shortForm, partitions.length, allowLocal))
683683
logInfo("Final stage: " + finalStage + "(" + finalStage.name + ")")
684684
logInfo("Parents of final stage: " + finalStage.parents)
685685
logInfo("Missing parents: " + getMissingParentStages(finalStage))

core/src/main/scala/org/apache/spark/scheduler/Stage.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -108,8 +108,8 @@ private[spark] class Stage(
108108

109109
def attemptId: Int = nextAttemptId
110110

111-
val name = callSite.short
112-
val details = callSite.long
111+
val name = callSite.shortForm
112+
val details = callSite.longForm
113113

114114
override def toString = "Stage " + id
115115

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ import org.apache.spark.executor.ExecutorUncaughtExceptionHandler
4444
import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
4545

4646
/** CallSite represents a place in user code. It can have a short and a long form. */
47-
private[spark] case class CallSite(short: String, long: String)
47+
private[spark] case class CallSite(shortForm: String, longForm: String)
4848

4949
/**
5050
* Various utility methods used by Spark.
@@ -848,8 +848,8 @@ private[spark] object Utils extends Logging {
848848
}
849849
val callStackDepth = System.getProperty("spark.callstack.depth", "20").toInt
850850
CallSite(
851-
short = "%s at %s:%s".format(lastSparkMethod, firstUserFile, firstUserLine),
852-
long = callStack.take(callStackDepth).mkString("\n"))
851+
shortForm = "%s at %s:%s".format(lastSparkMethod, firstUserFile, firstUserLine),
852+
longForm = callStack.take(callStackDepth).mkString("\n"))
853853
}
854854

855855
/** Return a string containing part of a file from byte 'start' to 'end'. */

core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ package object testPackage extends Assertions {
7070
def runCallSiteTest(sc: SparkContext) {
7171
val rdd = sc.makeRDD(Array(1, 2, 3, 4), 2)
7272
val rddCreationSite = rdd.getCreationSite
73-
val curCallSite = sc.getCallSite().short // note: 2 lines after definition of "rdd"
73+
val curCallSite = sc.getCallSite().shortForm // note: 2 lines after definition of "rdd"
7474

7575
val rddCreationLine = rddCreationSite match {
7676
case CALL_SITE_REGEX(func, file, line) => {

0 commit comments

Comments (0)