
Commit 1d90cc3

Changes for SPARK-1853
1 parent 5f3105a commit 1d90cc3

4 files changed: 2 additions & 4 deletions

conf/log4j.properties.template

Lines changed: 0 additions & 1 deletion
@@ -4,7 +4,6 @@ log4j.appender.console=org.apache.log4j.ConsoleAppender
 log4j.appender.console.target=System.err
 log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
-log4j.logger.org.apache.spark.rdd.RDD=INFO
 
 # Settings to quiet third party logs that are too verbose
 log4j.logger.org.eclipse.jetty=WARN
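With the explicit INFO level removed from the template, the org.apache.spark.rdd.RDD logger now simply inherits its level from its parent/root logger (standard log4j behavior); anyone who still wants the old output can presumably re-add the removed log4j.logger.org.apache.spark.rdd.RDD=INFO line in their own log4j.properties.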

core/src/main/scala/org/apache/spark/rdd/RDD.scala

Lines changed: 1 addition & 1 deletion
@@ -125,7 +125,7 @@ abstract class RDD[T: ClassTag](
   val id: Int = sc.newRddId()
 
   /** A friendly name for this RDD */
-  @transient var name: String = sc.getLocalProperty("rddName")
+  @transient var name: String = null
 
   /** Assign a name to this RDD */
   def setName(_name: String): this.type = {
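This reverts the default so that name is null again rather than being picked up from the "rddName" local property; an RDD is therefore only named when the caller sets one explicitly through setName. A minimal, illustrative sketch of that explicit path (the app name, master, and values below are made up; in spark-shell the existing sc can be used instead):

    import org.apache.spark.{SparkConf, SparkContext}

    // Illustrative only: with the default back to null, an RDD gets a name
    // only when the caller assigns one explicitly.
    val sc = new SparkContext(new SparkConf().setAppName("rdd-name-example").setMaster("local"))
    val data = sc.parallelize(1 to 100)
    data.setName("inputNumbers")   // setName returns this.type, so it can be chained
    println(data.name)             // prints "inputNumbers", not a value read from a local property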

streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ class ForEachDStream[T: ClassTag] (
   override def compute(validTime: Time): Option[RDD[Unit]] = None
 
   override def generateJob(time: Time): Option[Job] = {
-    return parent.getOrCompute(time) match {
+    parent.getOrCompute(time) match {
       case Some(rdd) =>
         val jobFunc = () => {
           foreachFunc(rdd, time)
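The only change here is dropping the explicit return: in Scala the last expression of a method body is already its result, so the value of the match expression becomes generateJob's return value, and explicit return is usually avoided because it relies on non-local return when it sits inside a closure. A small standalone illustration of the idiom (not Spark code):

    // The match expression is the last (and only) expression in the body,
    // so its value is the method's result; no return keyword is needed.
    def describe(x: Option[Int]): String = x match {
      case Some(n) => s"got $n"
      case None    => "nothing"
    }

    println(describe(Some(3)))  // got 3
    println(describe(None))     // nothing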

streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala

Lines changed: 0 additions & 1 deletion
@@ -22,7 +22,6 @@ import org.apache.spark.{SparkException, SparkEnv, Logging}
 import org.apache.spark.streaming.{Checkpoint, Time, CheckpointWriter}
 import org.apache.spark.streaming.util.{ManualClock, RecurringTimer, Clock}
 import scala.util.{Failure, Success, Try}
-import org.apache.spark.util.{CallSite, Utils}
 
 /** Event classes for JobGenerator */
 private[scheduler] sealed trait JobGeneratorEvent

0 commit comments
