diff --git a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
index b64422465226..8baaa68692a8 100644
--- a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
@@ -18,9 +18,10 @@
 package org.apache.spark.storage
 
 import java.nio.{ByteBuffer, MappedByteBuffer}
+import java.nio.file.Files
 import java.util.{Arrays, Random}
 
-import com.google.common.io.{ByteStreams, Files}
+import com.google.common.io.ByteStreams
 import io.netty.channel.FileRegion
 
 import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
@@ -150,7 +151,7 @@ class DiskStoreSuite extends SparkFunSuite {
assert(diskStore.getSize(blockId) === testData.length)
- val diskData = Files.toByteArray(diskBlockManager.getFile(blockId.name))
+ val diskData = Files.readAllBytes(diskBlockManager.getFile(blockId.name).toPath)
assert(!Arrays.equals(testData, diskData))
val blockData = diskStore.getBytes(blockId)
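A minimal sketch (not part of the patch; object and file names are illustrative) of the substitution these hunks apply: Guava's Files.toByteArray(file) and java.nio.file.Files.readAllBytes(path) both return the file's contents as an Array[Byte], so a call site only needs the File-to-Path conversion via toPath.

    import java.io.File
    import java.nio.charset.StandardCharsets
    import java.nio.file.Files
    import java.util.Arrays

    object ReadAllBytesSketch {
      def main(args: Array[String]): Unit = {
        // Scratch file created only for this sketch.
        val file: File = File.createTempFile("read-all-bytes-demo", ".bin")
        file.deleteOnExit()
        Files.write(file.toPath, "hello".getBytes(StandardCharsets.UTF_8))

        // Guava call shape:  com.google.common.io.Files.toByteArray(file)
        // NIO replacement:   java.nio.file.Files.readAllBytes(file.toPath)
        val bytes: Array[Byte] = Files.readAllBytes(file.toPath)
        assert(Arrays.equals(bytes, "hello".getBytes(StandardCharsets.UTF_8)))
      }
    }
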
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStep.scala
index 462d70dee534..2941d3e9f9e7 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStep.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStep.scala
@@ -18,10 +18,11 @@ package org.apache.spark.deploy.k8s.features
 
 import java.io.File
 import java.nio.charset.StandardCharsets
+import java.nio.file.Files
 
 import scala.jdk.CollectionConverters._
 
-import com.google.common.io.{BaseEncoding, Files}
+import com.google.common.io.BaseEncoding
 import io.fabric8.kubernetes.api.model.{ContainerBuilder, HasMetadata, PodBuilder, Secret, SecretBuilder}
 
 import org.apache.spark.deploy.k8s.{KubernetesConf, SparkPod}
@@ -153,7 +154,7 @@ private[spark] class DriverKubernetesCredentialsFeatureStep(kubernetesConf: Kube
.map { file =>
require(file.isFile, String.format("%s provided at %s does not exist or is not a file.",
fileType, file.getAbsolutePath))
- BaseEncoding.base64().encode(Files.toByteArray(file))
+ BaseEncoding.base64().encode(Files.readAllBytes(file.toPath))
}
}
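The changed line keeps Guava's BaseEncoding for the base64 step and only moves the byte read to NIO. A small sketch (illustrative temp file, not part of the patch) of that call shape next to the pure-JDK java.util.Base64 equivalent; both emit standard RFC 4648 base64 with padding, so the Guava encoder could also be swapped out later if desired.

    import java.nio.charset.StandardCharsets
    import java.nio.file.Files
    import java.util.Base64

    import com.google.common.io.BaseEncoding

    object CredentialEncodingSketch {
      def main(args: Array[String]): Unit = {
        // Stand-in for a credential file; path and contents are illustrative.
        val pem = Files.createTempFile("client-cert-demo", ".pem")
        Files.write(pem, "fake-pem-bytes".getBytes(StandardCharsets.UTF_8))

        // What the step does after this patch: NIO read, Guava base64.
        val viaGuava = BaseEncoding.base64().encode(Files.readAllBytes(pem))
        // Pure-JDK equivalent, should the Guava encoder ever be dropped as well.
        val viaJdk = Base64.getEncoder.encodeToString(Files.readAllBytes(pem))

        assert(viaGuava == viaJdk)
        Files.deleteIfExists(pem)
      }
    }
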
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStep.scala
index 032a5266b0f2..67133755ee1a 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStep.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStep.scala
@@ -17,11 +17,11 @@
 package org.apache.spark.deploy.k8s.features
 
 import java.io.File
+import java.nio.file.Files
 
 import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 
-import com.google.common.io.Files
 import io.fabric8.kubernetes.api.model._
 import org.apache.commons.codec.binary.Base64
 import org.apache.hadoop.security.UserGroupInformation
@@ -235,7 +235,7 @@ private[spark] class KerberosConfDriverFeatureStep(kubernetesConf: KubernetesDri
.endMetadata()
.withImmutable(true)
.addToData(
- Map(file.getName() -> java.nio.file.Files.readString(file.toPath)).asJava)
+ Map(file.getName() -> Files.readString(file.toPath)).asJava)
.build()
}
} ++ {
@@ -247,7 +247,7 @@ private[spark] class KerberosConfDriverFeatureStep(kubernetesConf: KubernetesDri
.withName(ktSecretName)
.endMetadata()
.withImmutable(true)
- .addToData(kt.getName(), Base64.encodeBase64String(Files.toByteArray(kt)))
+ .addToData(kt.getName(), Base64.encodeBase64String(Files.readAllBytes(kt.toPath)))
.build())
} else {
Nil
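Two details worth noting in this file: once the Guava Files import is gone, the NIO class can be imported and used unqualified (hence the readString change above), and the two reads feed different Kubernetes objects, plain text into a ConfigMap entry and base64 into a Secret's data map. A small sketch of those two call shapes (illustrative file names and contents), using the same commons-codec Base64 helper the step already uses.

    import java.nio.file.Files

    import org.apache.commons.codec.binary.Base64

    object KerberosPayloadSketch {
      def main(args: Array[String]): Unit = {
        // Stand-ins for krb5.conf and the keytab; paths and contents are illustrative.
        val krb5 = Files.createTempFile("krb5-demo", ".conf")
        Files.writeString(krb5, "[libdefaults]\n  default_realm = EXAMPLE.COM\n")
        val keytab = Files.createTempFile("keytab-demo", ".kt")
        Files.write(keytab, Array[Byte](0x05, 0x02, 0x00, 0x01))

        // ConfigMap entries hold plain text, so krb5.conf is read as a string ...
        val krb5Text: String = Files.readString(krb5)
        // ... while Secret `data` values must be base64, so the keytab bytes are encoded.
        val keytabB64: String = Base64.encodeBase64String(Files.readAllBytes(keytab))

        println(s"krb5.conf chars: ${krb5Text.length}, keytab (base64): $keytabB64")
        Files.deleteIfExists(krb5)
        Files.deleteIfExists(keytab)
      }
    }
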
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 335bf09c6dec..3abcdc79d19b 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -282,6 +282,11 @@ This file is divided into 3 sections:
     scala.jdk.CollectionConverters._ and use .asScala / .asJava methods</customMessage>
   </check>
 
+  <check customId="FilesToByteArray" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
+    <parameters><parameter name="regex">\bFiles\.toByteArray\b</parameter></parameters>
+    <customMessage>Use java.nio.file.Files.readAllBytes instead.</customMessage>
+  </check>
+
   <check customId="getTempDirectory" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
     <parameters><parameter name="regex">\bFileUtils\.getTempDirectory\b</parameter></parameters>
     <customMessage>Use System.getProperty instead.</customMessage>
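The new scalastyle entry is a plain regex ban, so it flags the method name Files.toByteArray wherever it appears rather than resolving which Files class is meant. A tiny sketch (hypothetical object and method names) of a call site it would reject and the replacement its message points to.

    import java.io.File
    import java.nio.file.Files

    object RegexRuleSketch {
      // A call the new rule would reject is shown as a comment, since the regex
      // \bFiles\.toByteArray\b matches the method name on any Files object:
      //   com.google.common.io.Files.toByteArray(file)   // fails scalastyle
      def read(file: File): Array[Byte] =
        Files.readAllBytes(file.toPath)                   // passes: the suggested replacement
    }
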
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
index 8aa9728bd8dc..8a32f07537ff 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
@@ -18,12 +18,12 @@
 package org.apache.spark.streaming
 
 import java.io._
+import java.nio.file.Files
 import java.util.concurrent.ConcurrentLinkedQueue
 
 import scala.jdk.CollectionConverters._
 import scala.reflect.ClassTag
 
-import com.google.common.io.Files
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.hadoop.io.{IntWritable, Text}
@@ -648,7 +648,7 @@ class CheckpointSuite extends TestSuiteBase with LocalStreamingContext with DStr
*/
def writeFile(i: Int, clock: Clock): Unit = {
val file = new File(testDir, i.toString)
- java.nio.file.Files.writeString(file.toPath, s"$i\n")
+ Files.writeString(file.toPath, s"$i\n")
assert(file.setLastModified(clock.getTimeMillis()))
// Check that the file's modification date is actually the value we wrote, since rounding or
// truncation will break the test:
@@ -878,8 +878,8 @@ class CheckpointSuite extends TestSuiteBase with LocalStreamingContext with DStr
assert(checkpointFiles.size === 2)
// Although bytes2 was written with an old time, it contains the latest status, so we should
// try to read from it at first.
- assert(Files.toByteArray(checkpointFiles(0)) === bytes2)
- assert(Files.toByteArray(checkpointFiles(1)) === bytes1)
+ assert(Files.readAllBytes(checkpointFiles(0).toPath) === bytes2)
+ assert(Files.readAllBytes(checkpointFiles(1).toPath) === bytes1)
checkpointWriter.stop()
}
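A short sketch (illustrative path and payload, not part of the patch) of the round trip the updated test code relies on: text written with java.nio.file.Files.writeString comes back, via Files.readAllBytes, as its UTF-8 bytes, which is what the checkpoint assertions compare.

    import java.nio.charset.StandardCharsets
    import java.nio.file.Files
    import java.util.Arrays

    object CheckpointRoundTripSketch {
      def main(args: Array[String]): Unit = {
        // Stand-in for one of the test's input files; path and payload are illustrative.
        val file = Files.createTempFile("checkpoint-input-demo", ".txt")

        // writeFile in the suite writes text with Files.writeString (UTF-8 by default) ...
        Files.writeString(file, "42\n")
        // ... and the checkpoint assertions read raw bytes back with Files.readAllBytes.
        val bytes = Files.readAllBytes(file)

        assert(Arrays.equals(bytes, "42\n".getBytes(StandardCharsets.UTF_8)))
        Files.deleteIfExists(file)
      }
    }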