diff --git a/common/utils/pom.xml b/common/utils/pom.xml
index 6067ec40ecddb..abcadd83fc566 100644
--- a/common/utils/pom.xml
+++ b/common/utils/pom.xml
@@ -51,11 +51,6 @@
       <groupId>com.fasterxml.jackson.module</groupId>
       <artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
     </dependency>
-    <dependency>
-      <groupId>commons-io</groupId>
-      <artifactId>commons-io</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.apache.ivy</groupId>
       <artifactId>ivy</artifactId>
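
The test-scoped commons-io dependency removed above was only used by LogKeySuite (next file). A minimal sketch of the JDK replacements, assuming only java.nio.file.Files; the helper object and method names here are illustrative, not part of the patch:

```scala
import java.nio.file.{Files, Path}
import java.util.{List => JList}

object JdkFileIo {
  // commons-io: FileUtils.readLines(file, StandardCharsets.UTF_8)
  // JDK:        Files.readAllLines(path) -- decodes UTF-8 by default
  def readLines(path: Path): JList[String] = Files.readAllLines(path)

  // commons-io: FileUtils.writeLines(file, "UTF-8", lines)
  // JDK:        Files.write(path, lines) -- encodes UTF-8 by default and
  //             terminates each element with the platform line separator
  def writeLines(path: Path, lines: JList[String]): Path = Files.write(path, lines)
}
```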
diff --git a/common/utils/src/test/scala/org/apache/spark/util/LogKeySuite.scala b/common/utils/src/test/scala/org/apache/spark/util/LogKeySuite.scala
index 17e360f510a24..742d4066ffab2 100644
--- a/common/utils/src/test/scala/org/apache/spark/util/LogKeySuite.scala
+++ b/common/utils/src/test/scala/org/apache/spark/util/LogKeySuite.scala
@@ -17,14 +17,12 @@
 
 package org.apache.spark.util
 
-import java.nio.charset.StandardCharsets
 import java.nio.file.{Files, Path}
 import java.util.{ArrayList => JList}
 
 import scala.jdk.CollectionConverters._
 import scala.reflect.runtime.universe._
 
-import org.apache.commons.io.FileUtils
 import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite
 
 import org.apache.spark.internal.{Logging, LogKeys}
@@ -61,9 +59,8 @@ class LogKeySuite
   private def regenerateLogKeyFile(
       originalKeys: Seq[String], sortedKeys: Seq[String]): Unit = {
     if (originalKeys != sortedKeys) {
-      val logKeyFile = logKeyFilePath.toFile
-      logInfo(s"Regenerating the file $logKeyFile")
-      val originalContents = FileUtils.readLines(logKeyFile, StandardCharsets.UTF_8)
+      logInfo(s"Regenerating the file $logKeyFilePath")
+      val originalContents = Files.readAllLines(logKeyFilePath)
       val sortedContents = new JList[String]()
       var firstMatch = false
       originalContents.asScala.foreach { line =>
@@ -78,8 +75,8 @@ class LogKeySuite
           sortedContents.add(line)
         }
       }
-      Files.delete(logKeyFile.toPath)
-      FileUtils.writeLines(logKeyFile, StandardCharsets.UTF_8.name(), sortedContents)
+      Files.delete(logKeyFilePath)
+      Files.write(logKeyFilePath, sortedContents)
     }
   }
 
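
Behaviorally this is a like-for-like swap: Files.readAllLines decodes UTF-8 by default, and Files.write encodes UTF-8 and appends the platform line separator after each element, matching what FileUtils.writeLines did. A self-contained sketch of the same read/sort/rewrite pattern; the temp file and keys are made up for illustration:

```scala
import java.nio.file.Files
import scala.jdk.CollectionConverters._

object RegenerateSketch {
  def main(args: Array[String]): Unit = {
    val path = Files.createTempFile("logkeys-demo", ".scala")
    Files.write(path, Seq("  Seq(", "    \"ZEBRA\",", "    \"ALPHA\",", "  )").asJava)

    // Same pattern as the suite: read every line, then rewrite the
    // interior of the Seq(...) block in sorted order.
    val original = Files.readAllLines(path).asScala.toSeq
    val sorted =
      original.head +: original.slice(1, original.size - 1).sorted :+ original.last

    Files.delete(path) // Files.write would truncate anyway; the delete mirrors the suite
    Files.write(path, sorted.asJava)
    Files.readAllLines(path).asScala.foreach(println)
  }
}
```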
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 7c05199d02b57..740cf48a2f765 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -282,6 +282,16 @@ This file is divided into 3 sections:
     scala.jdk.CollectionConverters._ and use .asScala / .asJava methods</customMessage>
   </check>
 
+  <check customId="readLines" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
+    <parameters><parameter name="regex">FileUtils\.readLines</parameter></parameters>
+    <customMessage>Use Files.readAllLines instead.</customMessage>
+  </check>
+
+  <check customId="writeLines" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
+    <parameters><parameter name="regex">FileUtils\.writeLines</parameter></parameters>
+    <customMessage>Use Files.write instead.</customMessage>
+  </check>
+
   <check customId="commonsiofileutils" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
     <parameters><parameter name="regex">FileUtils\.deleteDirectory</parameter></parameters>
     <customMessage>Use deleteRecursively of SparkFileUtils or Utils instead.</customMessage>
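
Scalastyle's RegexChecker matches raw file text, so the two new bans flag any literal occurrence of the banned calls, comments included; the customId values shown above are reconstructed placeholders, not confirmed from the original patch. A hypothetical snippet that passes the new rules:

```scala
import java.nio.file.{Files, Path}

object CompliantIo {
  // The sanctioned JDK calls; a literal commons-io read-lines or
  // write-lines call anywhere in a .scala file would now fail the build
  // with the customMessage configured above.
  def roundTrip(path: Path): Unit = {
    val lines = Files.readAllLines(path)
    Files.write(path, lines)
  }
  // If an exception were ever needed, scalastyle's standard suppression
  // comments apply: // scalastyle:off <customId> ... // scalastyle:on <customId>
}
```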