Commit dab3796

Revert "[SPARK-1470][SPARK-1842] Use the scala-logging wrapper instead of the directly sfl4j api"
This reverts commit adc8303.

35 files changed (+97 −203 lines)


core/pom.xml

Lines changed: 0 additions & 4 deletions
@@ -98,10 +98,6 @@
       <groupId>org.slf4j</groupId>
       <artifactId>jcl-over-slf4j</artifactId>
     </dependency>
-    <dependency>
-      <groupId>com.typesafe.scala-logging</groupId>
-      <artifactId>scala-logging-slf4j_${scala.binary.version}</artifactId>
-    </dependency>
     <dependency>
       <groupId>log4j</groupId>
       <artifactId>log4j</artifactId>

core/src/main/scala/org/apache/spark/Logging.scala

Lines changed: 15 additions & 24 deletions
@@ -18,9 +18,8 @@
 package org.apache.spark
 
 import org.apache.log4j.{LogManager, PropertyConfigurator}
-import org.slf4j.LoggerFactory
+import org.slf4j.{Logger, LoggerFactory}
 import org.slf4j.impl.StaticLoggerBinder
-import com.typesafe.scalalogging.slf4j.Logger
 
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.util.Utils
@@ -40,69 +39,61 @@ trait Logging {
   // be serialized and used on another machine
   @transient private var log_ : Logger = null
 
-  // Method to get the logger name for this object
-  protected def logName = {
-    var className = this.getClass.getName
-    // Ignore trailing $'s in the class names for Scala objects
-    if (className.endsWith("$")) {
-      className = className.substring(0, className.length - 1)
-    }
-    className
-  }
-
   // Method to get or create the logger for this object
   protected def log: Logger = {
     if (log_ == null) {
       initializeIfNecessary()
-      log_ = Logger(LoggerFactory.getLogger(logName))
+      var className = this.getClass.getName
+      // Ignore trailing $'s in the class names for Scala objects
+      log_ = LoggerFactory.getLogger(className.stripSuffix("$"))
     }
     log_
   }
 
   // Log methods that take only a String
   protected def logInfo(msg: => String) {
-    log.info(msg)
+    if (log.isInfoEnabled) log.info(msg)
   }
 
   protected def logDebug(msg: => String) {
-    log.debug(msg)
+    if (log.isDebugEnabled) log.debug(msg)
   }
 
   protected def logTrace(msg: => String) {
-    log.trace(msg)
+    if (log.isTraceEnabled) log.trace(msg)
   }
 
   protected def logWarning(msg: => String) {
-    log.warn(msg)
+    if (log.isWarnEnabled) log.warn(msg)
   }
 
   protected def logError(msg: => String) {
-    log.error(msg)
+    if (log.isErrorEnabled) log.error(msg)
   }
 
   // Log methods that take Throwables (Exceptions/Errors) too
   protected def logInfo(msg: => String, throwable: Throwable) {
-    log.info(msg, throwable)
+    if (log.isInfoEnabled) log.info(msg, throwable)
   }
 
   protected def logDebug(msg: => String, throwable: Throwable) {
-    log.debug(msg, throwable)
+    if (log.isDebugEnabled) log.debug(msg, throwable)
   }
 
   protected def logTrace(msg: => String, throwable: Throwable) {
-    log.trace(msg, throwable)
+    if (log.isTraceEnabled) log.trace(msg, throwable)
  }
 
   protected def logWarning(msg: => String, throwable: Throwable) {
-    log.warn(msg, throwable)
+    if (log.isWarnEnabled) log.warn(msg, throwable)
   }
 
   protected def logError(msg: => String, throwable: Throwable) {
-    log.error(msg, throwable)
+    if (log.isErrorEnabled) log.error(msg, throwable)
   }
 
   protected def isTraceEnabled(): Boolean = {
-    log.underlying.isTraceEnabled
+    log.isTraceEnabled
   }
 
   private def initializeIfNecessary() {
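
Two idioms in the restored trait are worth noting: log messages are passed by name (msg: => String), and every call is wrapped in an isXxxEnabled guard, so a disabled level never pays the cost of building the interpolated string. A minimal self-contained sketch of the same pattern (the trait and object names here are illustrative, not Spark's):

import org.slf4j.{Logger, LoggerFactory}

// Sketch of the slf4j-only pattern this revert restores: a lazily created
// logger plus by-name messages behind explicit level guards.
trait LazyLogging {
  @transient private var log_ : Logger = null

  protected def log: Logger = {
    if (log_ == null) {
      // Drop the trailing '$' Scala appends to companion-object class names.
      log_ = LoggerFactory.getLogger(getClass.getName.stripSuffix("$"))
    }
    log_
  }

  // By-name parameter: the message is only constructed if DEBUG is enabled.
  protected def logDebug(msg: => String): Unit = {
    if (log.isDebugEnabled) log.debug(msg)
  }
}

object Demo extends LazyLogging {
  def main(args: Array[String]): Unit =
    logDebug(s"expensive: ${(1 to 1000000).sum}")  // skipped when DEBUG is off
}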

core/src/main/scala/org/apache/spark/util/SignalLogger.scala

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@
 package org.apache.spark.util
 
 import org.apache.commons.lang3.SystemUtils
-import com.typesafe.scalalogging.slf4j.Logger
+import org.slf4j.Logger
 import sun.misc.{Signal, SignalHandler}
 
 /**

mllib/pom.xml

Lines changed: 0 additions & 4 deletions
@@ -59,10 +59,6 @@
       <artifactId>breeze_${scala.binary.version}</artifactId>
       <version>0.7</version>
       <exclusions>
-        <exclusion>
-          <groupId>com.typesafe</groupId>
-          <artifactId>scalalogging-slf4j_${scala.binary.version}</artifactId>
-        </exclusion>
         <!-- This is included as a compile-scoped dependency by jtransforms, which is
           a dependency of breeze. -->
         <exclusion>

pom.xml

Lines changed: 0 additions & 5 deletions
@@ -279,11 +279,6 @@
       <artifactId>slf4j-log4j12</artifactId>
       <version>${slf4j.version}</version>
     </dependency>
-    <dependency>
-      <groupId>com.typesafe.scala-logging</groupId>
-      <artifactId>scala-logging-slf4j_${scala.binary.version}</artifactId>
-      <version>2.1.2</version>
-    </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>jul-to-slf4j</artifactId>

project/MimaExcludes.scala

Lines changed: 2 additions & 89 deletions
@@ -103,101 +103,14 @@ object MimaExcludes {
         ProblemFilters.exclude[IncompatibleMethTypeProblem](
           "org.apache.spark.mllib.tree.impurity.Variance.calculate")
       ) ++
-      Seq( // Package-private classes removed in SPARK-2341
+      Seq ( // Package-private classes removed in SPARK-2341
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.BinaryLabelParser"),
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.BinaryLabelParser$"),
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.LabelParser"),
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.LabelParser$"),
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.MulticlassLabelParser"),
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.MulticlassLabelParser$")
-      ) ++
-      Seq(
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.bagel.Bagel.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.streaming.StreamingContext.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.streaming.dstream.DStream.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.mllib.recommendation.ALS.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.mllib.clustering.KMeans.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.mllib.classification.NaiveBayes.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.streaming.kafka.KafkaReceiver.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.SparkContext.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.rdd.PairRDDFunctions.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.rdd.OrderedRDDFunctions.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.rdd.SequenceFileRDDFunctions.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.rdd.DoubleRDDFunctions.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.streaming.twitter.TwitterReceiver.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.streaming.zeromq.ZeroMQReceiver.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.streaming.flume.FlumeReceiver.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.rdd.RDD.log"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.SparkConf.log"),
-
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.SparkConf.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.bagel.Bagel.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.streaming.StreamingContext.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.streaming.dstream.DStream.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.mllib.recommendation.ALS.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.mllib.clustering.KMeans.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.mllib.classification.NaiveBayes.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.streaming.twitter.TwitterReceiver.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.streaming.zeromq.ZeroMQReceiver.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.SparkContext.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.rdd.RDD.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.rdd.SequenceFileRDDFunctions.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.rdd.OrderedRDDFunctions.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.rdd.PairRDDFunctions.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.streaming.kafka.KafkaReceiver.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.rdd.DoubleRDDFunctions.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.streaming.flume.FlumeReceiver.org$apache$spark$Logging$$log__="),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.streaming.kafka.KafkaReceiver.org$apache$spark$Logging$$log_"),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem]
-          ("org.apache.spark.streaming.twitter.TwitterReceiver.org$apache$spark$Logging$$log_"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.streaming.twitter.TwitterReceiver.org$apache$spark$Logging$$log_"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.streaming.zeromq.ZeroMQReceiver.org$apache$spark$Logging$$log_"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.bagel.Bagel.org$apache$spark$Logging$$log_"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.bagel.Bagel.org$apache$spark$Logging$$log_"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.streaming.flume.FlumeReceiver.org$apache$spark$Logging$$log_"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]
-          ("org.apache.spark.streaming.kafka.KafkaReceiver.org$apache$spark$Logging$$log_")
-      )
+      )
     case v if v.startsWith("1.0") =>
       Seq(
         MimaBuild.excludeSparkPackage("api.java"),
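
The mangled member names in the dropped filters (org$apache$spark$Logging$$log_ and its setter org$apache$spark$Logging$$log__=) come from Scala's trait-field encoding: a private var in a trait is compiled into public, name-mangled accessors on every class that mixes the trait in, so changing the var's type from the scala-logging Logger back to slf4j's changed those binary signatures across the codebase. With the revert the signatures match the released ones again and the filters are unnecessary. A minimal illustration of the encoding, under an illustrative `example` package rather than Spark's:

package example

// Compiles to name-mangled public accessors on each implementing class,
// e.g. example$Logging$$log_ and example$Logging$$log__= (visible via javap);
// changing the var's type changes both signatures.
trait Logging {
  @transient private var log_ : AnyRef = null
}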

sql/catalyst/pom.xml

Lines changed: 5 additions & 0 deletions
@@ -54,6 +54,11 @@
       <artifactId>spark-core_${scala.binary.version}</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>com.typesafe</groupId>
+      <artifactId>scalalogging-slf4j_${scala.binary.version}</artifactId>
+      <version>1.0.1</version>
+    </dependency>
     <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
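
This pin matters because sql/catalyst, unlike the rest of Spark after this revert, keeps logging through scala-logging: the catalyst diffs below call a `logger` member rather than core's `log`. A sketch of that usage, assuming scalalogging-slf4j 1.0.1's com.typesafe.scalalogging.slf4j.Logging trait (which supplies `logger`; its methods avoid building the message when the level is disabled):

import com.typesafe.scalalogging.slf4j.Logging

// Assumes the 1.0.1 API: mixing in Logging provides a `logger` member.
class ResolverLike extends Logging {
  def resolve(name: String): String = {
    logger.trace(s"Attempting to resolve $name")  // guarded by the library
    name
  }
}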

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 2 additions & 2 deletions
@@ -109,12 +109,12 @@ class Analyzer(catalog: Catalog, registry: FunctionRegistry, caseSensitive: Bool
   object ResolveReferences extends Rule[LogicalPlan] {
     def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
       case q: LogicalPlan if q.childrenResolved =>
-        log.trace(s"Attempting to resolve ${q.simpleString}")
+        logger.trace(s"Attempting to resolve ${q.simpleString}")
         q transformExpressions {
           case u @ UnresolvedAttribute(name) =>
             // Leave unchanged if resolution fails. Hopefully will be resolved next round.
             val result = q.resolve(name).getOrElse(u)
-            log.debug(s"Resolving $u to $result")
+            logger.debug(s"Resolving $u to $result")
             result
         }
     }

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala

Lines changed: 4 additions & 4 deletions
@@ -75,7 +75,7 @@ trait HiveTypeCoercion {
       // Leave the same if the dataTypes match.
       case Some(newType) if a.dataType == newType.dataType => a
       case Some(newType) =>
-        log.debug(s"Promoting $a to $newType in ${q.simpleString}}")
+        logger.debug(s"Promoting $a to $newType in ${q.simpleString}}")
         newType
     }
   }
@@ -154,7 +154,7 @@ trait HiveTypeCoercion {
         (Alias(Cast(l, StringType), l.name)(), r)
 
       case (l, r) if l.dataType != r.dataType =>
-        log.debug(s"Resolving mismatched union input ${l.dataType}, ${r.dataType}")
+        logger.debug(s"Resolving mismatched union input ${l.dataType}, ${r.dataType}")
         findTightestCommonType(l.dataType, r.dataType).map { widestType =>
           val newLeft =
             if (l.dataType == widestType) l else Alias(Cast(l, widestType), l.name)()
@@ -170,15 +170,15 @@ trait HiveTypeCoercion {
 
       val newLeft =
         if (castedLeft.map(_.dataType) != left.output.map(_.dataType)) {
-          log.debug(s"Widening numeric types in union $castedLeft ${left.output}")
+          logger.debug(s"Widening numeric types in union $castedLeft ${left.output}")
           Project(castedLeft, left)
         } else {
           left
         }
 
       val newRight =
         if (castedRight.map(_.dataType) != right.output.map(_.dataType)) {
-          log.debug(s"Widening numeric types in union $castedRight ${right.output}")
+          logger.debug(s"Widening numeric types in union $castedRight ${right.output}")
           Project(castedRight, right)
         } else {
           right

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalyst.expressions
 
-import org.apache.spark.Logging
+import org.apache.spark.sql.catalyst.Logging
 import org.apache.spark.sql.catalyst.errors.attachTree
 import org.apache.spark.sql.catalyst.types._
 import org.apache.spark.sql.catalyst.trees
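
Note the import now points at a catalyst-local Logging, not core's org.apache.spark.Logging. The defining file is outside this diff; a hypothetical reconstruction consistent with the import path and the `logger` calls above is a package-level re-export of scala-logging's trait:

// Hypothetical sketch (the real definition is not shown in this commit):
// expose scalalogging's Logging under org.apache.spark.sql.catalyst so that
// catalyst classes pick up its `logger` member.
package org.apache.spark.sql

package object catalyst {
  protected[sql] trait Logging extends com.typesafe.scalalogging.slf4j.Logging
}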
