From 97b5a4860cb9e18c1d75b43b01328d79a4b3fad9 Mon Sep 17 00:00:00 2001 From: yangjie01 Date: Tue, 4 Jan 2022 14:59:45 +0800 Subject: [PATCH 1/3] refactor first --- .../scala/org/apache/spark/TestUtils.scala | 27 ++++++++++--------- 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/TestUtils.scala b/core/src/main/scala/org/apache/spark/TestUtils.scala index 9bc6ccbd0df65..8708362f1dce8 100644 --- a/core/src/main/scala/org/apache/spark/TestUtils.scala +++ b/core/src/main/scala/org/apache/spark/TestUtils.scala @@ -24,7 +24,7 @@ import java.nio.file.{Files => JavaFiles, Paths} import java.nio.file.attribute.PosixFilePermission.{OWNER_EXECUTE, OWNER_READ, OWNER_WRITE} import java.security.SecureRandom import java.security.cert.X509Certificate -import java.util.{Arrays, EnumSet, Locale, Properties} +import java.util.{Arrays, EnumSet, Locale} import java.util.concurrent.{TimeoutException, TimeUnit} import java.util.jar.{JarEntry, JarOutputStream, Manifest} import java.util.regex.Pattern @@ -41,9 +41,9 @@ import scala.util.Try import com.google.common.io.{ByteStreams, Files} import org.apache.commons.lang3.StringUtils -// scalastyle:off -import org.apache.log4j.PropertyConfigurator -// scalastyle:on +import org.apache.logging.log4j.LogManager +import org.apache.logging.log4j.core.LoggerContext +import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilderFactory import org.eclipse.jetty.server.Handler import org.eclipse.jetty.server.Server import org.eclipse.jetty.server.handler.DefaultHandler @@ -421,14 +421,17 @@ private[spark] object TestUtils { * config a log4j properties used for testsuite */ def configTestLog4j(level: String): Unit = { - val pro = new Properties() - pro.put("log4j.rootLogger", s"$level, console") - pro.put("log4j.appender.console", "org.apache.log4j.ConsoleAppender") - pro.put("log4j.appender.console.target", "System.err") - pro.put("log4j.appender.console.layout", 
"org.apache.log4j.PatternLayout") - pro.put("log4j.appender.console.layout.ConversionPattern", - "%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n") - PropertyConfigurator.configure(pro) + val configuration = ConfigurationBuilderFactory.newConfigurationBuilder() + .addRootProperty("rootLogger.level", s"$level") + .addRootProperty("rootLogger.appenderRef.stdout.ref", "console") + .addProperty("appender.console.type", "Console") + .addProperty("appender.console.name", "console") + .addProperty("appender.console.target", "SYSTEM_ERR") + .addProperty("appender.console.layout.type", "PatternLayout") + .addProperty("appender.console.layout.pattern", + "%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n") + .build() + LogManager.getContext(false).asInstanceOf[LoggerContext].reconfigure(configuration) } /** From 8572b873d984c4e3833199debb515de1f9202247 Mon Sep 17 00:00:00 2001 From: yangjie01 Date: Tue, 4 Jan 2022 17:14:42 +0800 Subject: [PATCH 2/3] rename to configTestLog4j2 --- .../scala/org/apache/spark/TestUtils.scala | 23 +++++++++---------- .../scala/org/apache/spark/DriverSuite.scala | 2 +- .../spark/deploy/SparkSubmitSuite.scala | 4 ++-- .../WholeStageCodegenSparkSubmitSuite.scala | 2 +- .../spark/sql/hive/HiveSparkSubmitSuite.scala | 20 ++++++++-------- 5 files changed, 25 insertions(+), 26 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/TestUtils.scala b/core/src/main/scala/org/apache/spark/TestUtils.scala index 8708362f1dce8..736dd715c71cd 100644 --- a/core/src/main/scala/org/apache/spark/TestUtils.scala +++ b/core/src/main/scala/org/apache/spark/TestUtils.scala @@ -43,6 +43,7 @@ import com.google.common.io.{ByteStreams, Files} import org.apache.commons.lang3.StringUtils import org.apache.logging.log4j.LogManager import org.apache.logging.log4j.core.LoggerContext +import org.apache.logging.log4j.core.appender.ConsoleAppender import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilderFactory import org.eclipse.jetty.server.Handler import 
org.eclipse.jetty.server.Server @@ -418,19 +419,17 @@ private[spark] object TestUtils { } /** - * config a log4j properties used for testsuite + * config a log4j2 properties used for testsuite */ - def configTestLog4j(level: String): Unit = { - val configuration = ConfigurationBuilderFactory.newConfigurationBuilder() - .addRootProperty("rootLogger.level", s"$level") - .addRootProperty("rootLogger.appenderRef.stdout.ref", "console") - .addProperty("appender.console.type", "Console") - .addProperty("appender.console.name", "console") - .addProperty("appender.console.target", "SYSTEM_ERR") - .addProperty("appender.console.layout.type", "PatternLayout") - .addProperty("appender.console.layout.pattern", - "%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n") - .build() + def configTestLog4j2(level: String): Unit = { + val builder = ConfigurationBuilderFactory.newConfigurationBuilder() + val appenderBuilder = builder.newAppender("console", "CONSOLE") + .addAttribute("target", ConsoleAppender.Target.SYSTEM_ERR) + appenderBuilder.add(builder.newLayout("PatternLayout") + .addAttribute("pattern", "%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n")) + builder.add(appenderBuilder) + builder.add(builder.newRootLogger(s"$level").add(builder.newAppenderRef("console"))) + val configuration = builder.build() LogManager.getContext(false).asInstanceOf[LoggerContext].reconfigure(configuration) } diff --git a/core/src/test/scala/org/apache/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala index f58777584d0ae..124a138ccf10f 100644 --- a/core/src/test/scala/org/apache/spark/DriverSuite.scala +++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala @@ -51,7 +51,7 @@ class DriverSuite extends SparkFunSuite with TimeLimits { */ object DriverWithoutCleanup { def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val conf = new SparkConf val sc = new SparkContext(args(0), "DriverWithoutCleanup", conf) sc.parallelize(1 to 100, 
4).count() diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala index 19e4875512a65..aead72ea0fdb7 100644 --- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala @@ -1520,7 +1520,7 @@ class SparkSubmitSuite object JarCreationTest extends Logging { def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val conf = new SparkConf() val sc = new SparkContext(conf) val result = sc.makeRDD(1 to 100, 10).mapPartitions { x => @@ -1544,7 +1544,7 @@ object JarCreationTest extends Logging { object SimpleApplicationTest { def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val conf = new SparkConf() val sc = new SparkContext(conf) val configs = Seq("spark.master", "spark.app.name") diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSparkSubmitSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSparkSubmitSuite.scala index ffbdc3f64195f..5e0318d97ff94 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSparkSubmitSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSparkSubmitSuite.scala @@ -71,7 +71,7 @@ object WholeStageCodegenSparkSubmitSuite extends Assertions with Logging { var spark: SparkSession = _ def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") spark = SparkSession.builder().getOrCreate() diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala index 90752e70e1b57..170cf4898f314 100644 --- 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala @@ -371,7 +371,7 @@ class HiveSparkSubmitSuite object SetMetastoreURLTest extends Logging { def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val sparkConf = new SparkConf(loadDefaults = true) val builder = SparkSession.builder() @@ -409,7 +409,7 @@ object SetMetastoreURLTest extends Logging { object SetWarehouseLocationTest extends Logging { def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val sparkConf = new SparkConf(loadDefaults = true).set(UI_ENABLED, false) val providedExpectedWarehouseLocation = @@ -489,7 +489,7 @@ object SetWarehouseLocationTest extends Logging { // can load the jar defined with the function. object TemporaryHiveUDFTest extends Logging { def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val conf = new SparkConf() conf.set(UI_ENABLED, false) val sc = new SparkContext(conf) @@ -527,7 +527,7 @@ object TemporaryHiveUDFTest extends Logging { // can load the jar defined with the function. object PermanentHiveUDFTest1 extends Logging { def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val conf = new SparkConf() conf.set(UI_ENABLED, false) val sc = new SparkContext(conf) @@ -565,7 +565,7 @@ object PermanentHiveUDFTest1 extends Logging { // can load the jar defined with the function. 
object PermanentHiveUDFTest2 extends Logging { def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val conf = new SparkConf() conf.set(UI_ENABLED, false) val sc = new SparkContext(conf) @@ -600,7 +600,7 @@ object PermanentHiveUDFTest2 extends Logging { // We test if we can load user jars in both driver and executors when HiveContext is used. object SparkSubmitClassLoaderTest extends Logging { def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val conf = new SparkConf() val hiveWarehouseLocation = Utils.createTempDir() conf.set(UI_ENABLED, false) @@ -670,7 +670,7 @@ object SparkSubmitClassLoaderTest extends Logging { // We test if we can correctly set spark sql configurations when HiveContext is used. object SparkSQLConfTest extends Logging { def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") // We override the SparkConf to add spark.sql.hive.metastore.version and // spark.sql.hive.metastore.jars to the beginning of the conf entry array. 
// So, if metadataHive get initialized after we set spark.sql.hive.metastore.version but @@ -711,7 +711,7 @@ object SPARK_9757 extends QueryTest { protected var spark: SparkSession = _ def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val hiveWarehouseLocation = Utils.createTempDir() val sparkContext = new SparkContext( @@ -760,7 +760,7 @@ object SPARK_11009 extends QueryTest { protected var spark: SparkSession = _ def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val sparkContext = new SparkContext( new SparkConf() @@ -791,7 +791,7 @@ object SPARK_14244 extends QueryTest { protected var spark: SparkSession = _ def main(args: Array[String]): Unit = { - TestUtils.configTestLog4j("INFO") + TestUtils.configTestLog4j2("INFO") val sparkContext = new SparkContext( new SparkConf() From cc060762428c078346098af65d924c4bafb4c120 Mon Sep 17 00:00:00 2001 From: yangjie01 Date: Wed, 5 Jan 2022 14:52:51 +0800 Subject: [PATCH 3/3] fix viirya's comments --- core/src/main/scala/org/apache/spark/TestUtils.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/TestUtils.scala b/core/src/main/scala/org/apache/spark/TestUtils.scala index 736dd715c71cd..20159afc51a6c 100644 --- a/core/src/main/scala/org/apache/spark/TestUtils.scala +++ b/core/src/main/scala/org/apache/spark/TestUtils.scala @@ -428,7 +428,7 @@ private[spark] object TestUtils { appenderBuilder.add(builder.newLayout("PatternLayout") .addAttribute("pattern", "%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n")) builder.add(appenderBuilder) - builder.add(builder.newRootLogger(s"$level").add(builder.newAppenderRef("console"))) + builder.add(builder.newRootLogger(level).add(builder.newAppenderRef("console"))) val configuration = builder.build() LogManager.getContext(false).asInstanceOf[LoggerContext].reconfigure(configuration) }