diff --git a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
index 022fe91edade..2a78a51e95e0 100644
--- a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
+++ b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
@@ -28,8 +28,8 @@ import scala.util.{Failure, Success, Try}
import com.google.common.io.CharStreams
import org.mockito.Mockito._
-import org.scalatest.mock.MockitoSugar
-import org.scalatest.ShouldMatchers
+import org.scalatest.mockito.MockitoSugar
+import org.scalatest.Matchers
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.network.{BlockDataManager, BlockTransferService}
@@ -37,7 +37,7 @@ import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
import org.apache.spark.network.shuffle.BlockFetchingListener
import org.apache.spark.storage.{BlockId, ShuffleBlockId}
-class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar with ShouldMatchers {
+class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar with Matchers {
test("security default off") {
val conf = new SparkConf()
.set("spark.app.id", "app-id")
@@ -78,7 +78,7 @@ class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar wi
val conf1 = conf0.clone.set("spark.authenticate", "false")
testConnection(conf0, conf1) match {
case Success(_) => fail("Should have failed")
- case Failure(t) => // any funny error may occur, sever will interpret SASL token as RPC
+      case Failure(_) => // any funny error may occur; the server will interpret the SASL token as RPC
}
}
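// ScalaTest 3.x moves MockitoSugar from org.scalatest.mock to
// org.scalatest.mockito and removes the ShouldMatchers alias in favor of
// Matchers, which is what the two hunks above track. A minimal sketch of a
// suite on the new API; the suite and test names here are illustrative, not
// part of this patch:
import org.scalatest.Matchers
import org.scalatest.mockito.MockitoSugar

import org.apache.spark.SparkFunSuite

class ExampleMatchersSuite extends SparkFunSuite with MockitoSugar with Matchers {
  test("should-syntax is unchanged under Matchers") {
    val runnable = mock[Runnable]     // MockitoSugar's mock[T], same as before
    Seq(1, 2, 3) should contain (2)   // Matchers replaces ShouldMatchers
  }
}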
diff --git a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
index 121447a96529..4912ad4fd08a 100644
--- a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
+++ b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
@@ -23,13 +23,12 @@ import org.mockito.Mockito.mock
import org.scalatest._
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
-import org.apache.spark.internal.config._
import org.apache.spark.network.BlockDataManager
class NettyBlockTransferServiceSuite
extends SparkFunSuite
with BeforeAndAfterEach
- with ShouldMatchers {
+ with Matchers {
private var service0: NettyBlockTransferService = _
private var service1: NettyBlockTransferService = _
diff --git a/dev/deps/spark-deps-hadoop-2.2 b/dev/deps/spark-deps-hadoop-2.2
index 9cbab3d89546..7f48def2a89d 100644
--- a/dev/deps/spark-deps-hadoop-2.2
+++ b/dev/deps/spark-deps-hadoop-2.2
@@ -145,7 +145,7 @@ scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
scala-parser-combinators_2.11-1.0.4.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.2.jar
+scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.0.0.jar
slf4j-api-1.7.16.jar
diff --git a/dev/deps/spark-deps-hadoop-2.3 b/dev/deps/spark-deps-hadoop-2.3
index 63ce6c66fd72..41b7d448da21 100644
--- a/dev/deps/spark-deps-hadoop-2.3
+++ b/dev/deps/spark-deps-hadoop-2.3
@@ -152,7 +152,7 @@ scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
scala-parser-combinators_2.11-1.0.4.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.2.jar
+scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.0.0.jar
slf4j-api-1.7.16.jar
diff --git a/dev/deps/spark-deps-hadoop-2.4 b/dev/deps/spark-deps-hadoop-2.4
index 122d5c27d0d4..274e5386e37c 100644
--- a/dev/deps/spark-deps-hadoop-2.4
+++ b/dev/deps/spark-deps-hadoop-2.4
@@ -152,7 +152,7 @@ scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
scala-parser-combinators_2.11-1.0.4.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.2.jar
+scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.0.0.jar
slf4j-api-1.7.16.jar
diff --git a/dev/deps/spark-deps-hadoop-2.6 b/dev/deps/spark-deps-hadoop-2.6
index 776aabd1118a..5dd8cf3a4b75 100644
--- a/dev/deps/spark-deps-hadoop-2.6
+++ b/dev/deps/spark-deps-hadoop-2.6
@@ -160,7 +160,7 @@ scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
scala-parser-combinators_2.11-1.0.4.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.2.jar
+scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.0.0.jar
slf4j-api-1.7.16.jar
diff --git a/dev/deps/spark-deps-hadoop-2.7 b/dev/deps/spark-deps-hadoop-2.7
index 524e8240737b..42919b404020 100644
--- a/dev/deps/spark-deps-hadoop-2.7
+++ b/dev/deps/spark-deps-hadoop-2.7
@@ -161,7 +161,7 @@ scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
scala-parser-combinators_2.11-1.0.4.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.2.jar
+scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.0.0.jar
slf4j-api-1.7.16.jar
diff --git a/pom.xml b/pom.xml
index a0c44f5ac1f4..e03323da85da 100644
--- a/pom.xml
+++ b/pom.xml
@@ -708,7 +708,7 @@
        <groupId>org.scalatest</groupId>
        <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>2.2.6</version>
+        <version>3.0.1</version>
        <scope>test</scope>
@@ -720,7 +720,7 @@
        <groupId>org.scalacheck</groupId>
        <artifactId>scalacheck_${scala.binary.version}</artifactId>
-        <version>1.12.5</version>
+        <version>1.13.4</version>
        <scope>test</scope>
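// ScalaTest 3.0.x is built against ScalaCheck 1.13.x, so the two version
// bumps above have to land together. A sketch of the equivalent sbt
// coordinates, for readers not building through this pom:
libraryDependencies ++= Seq(
  "org.scalatest"  %% "scalatest"  % "3.0.1"  % Test,
  "org.scalacheck" %% "scalacheck" % "1.13.4" % Test
)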
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 817de48de279..df0b7d37948e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -17,18 +17,18 @@
package org.apache.spark.sql.catalyst.analysis
-import org.scalatest.ShouldMatchers
+import org.scalatest.Matchers
-import org.apache.spark.sql.catalyst.{SimpleCatalystConf, TableIdentifier}
+import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.catalyst.plans.{Cross, Inner}
+import org.apache.spark.sql.catalyst.plans.Cross
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.types._
-class AnalysisSuite extends AnalysisTest with ShouldMatchers {
+class AnalysisSuite extends AnalysisTest with Matchers {
import org.apache.spark.sql.catalyst.analysis.TestRelations._
test("union project *") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ExchangeCoordinatorSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ExchangeCoordinatorSuite.scala
index 06bce9a2400e..21fac4ccb178 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ExchangeCoordinatorSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ExchangeCoordinatorSuite.scala
@@ -250,7 +250,7 @@ class ExchangeCoordinatorSuite extends SparkFunSuite with BeforeAndAfterAll {
}
def withSparkSession(
- f: SparkSession => Unit,
+ f: SparkSession => Any,
targetNumPostShufflePartitions: Int,
minNumPostShufflePartitions: Option[Int]): Unit = {
val sparkConf =
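// Under ScalaTest 3.x, assert(...) returns org.scalatest.Assertion rather
// than Unit, so test bodies handed to withSparkSession now have type
// SparkSession => Assertion. Function1 is covariant in its result:
// SparkSession => Assertion conforms to SparkSession => Any but not to
// SparkSession => Unit, hence the widened parameter. A minimal illustration
// (the check itself is hypothetical):
import org.apache.spark.sql.SparkSession
import org.scalatest.Assertion
import org.scalatest.Assertions._

val check: SparkSession => Assertion =
  session => assert(session.sparkContext.defaultParallelism > 0)
// check is accepted once f is typed SparkSession => Any; against the old
// SparkSession => Unit signature it would not typecheck.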
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
index 4bec52b9fe4f..615c3130a062 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
@@ -480,8 +480,8 @@ class BatchedWriteAheadLogSuite extends CommonWriteAheadLogTests(
}
// we make the write requests in separate threads so that we don't block the test thread
- private def writeAsync(wal: WriteAheadLog, event: String, time: Long): Promise[Unit] = {
- val p = Promise[Unit]()
+ private def writeAsync(wal: WriteAheadLog, event: String, time: Long): Promise[Any] = {
+ val p = Promise[Any]()
p.completeWith(Future {
val v = wal.write(event, time)
assert(v === walHandle)
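// Same ScalaTest 3.x effect as above: the Future block now ends in an
// assert(...) that yields Assertion, making it a Future[Assertion]. Future
// is covariant, so Promise[Any].completeWith adopts it directly, whereas
// Promise[Unit] would force the assertion result to be discarded. A
// standalone sketch of the same shape (the assertion is illustrative):
import scala.concurrent.{Future, Promise}
import scala.concurrent.ExecutionContext.Implicits.global
import org.scalatest.Assertion
import org.scalatest.Assertions._

val p = Promise[Any]()
val f: Future[Assertion] = Future { assert(1 + 1 == 2) }  // Assertion, not Unit
p.completeWith(f)  // fine: Future[Assertion] <: Future[Any]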