NettyBlockTransferSecuritySuite.scala
@@ -28,16 +28,16 @@ import scala.util.{Failure, Success, Try}

import com.google.common.io.CharStreams
import org.mockito.Mockito._
-import org.scalatest.mock.MockitoSugar
-import org.scalatest.ShouldMatchers
+import org.scalatest.mockito.MockitoSugar
+import org.scalatest.Matchers

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.network.{BlockDataManager, BlockTransferService}
import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
import org.apache.spark.network.shuffle.BlockFetchingListener
import org.apache.spark.storage.{BlockId, ShuffleBlockId}

-class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar with ShouldMatchers {
+class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar with Matchers {
  test("security default off") {
    val conf = new SparkConf()
      .set("spark.app.id", "app-id")
@@ -78,7 +78,7 @@ class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar with ShouldMatchers {
    val conf1 = conf0.clone.set("spark.authenticate", "false")
    testConnection(conf0, conf1) match {
      case Success(_) => fail("Should have failed")
-      case Failure(t) => // any funny error may occur, sever will interpret SASL token as RPC
+      case Failure(_) => // any funny error may occur, sever will interpret SASL token as RPC
    }
  }

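The two import swaps above track renames in ScalaTest 3.x: ShouldMatchers had long been a deprecated alias for Matchers and was removed outright, and MockitoSugar moved from org.scalatest.mock to org.scalatest.mockito. A minimal sketch (hypothetical suite name, not from this PR; assumes scalatest 3.0.1 and mockito on the test classpath) of what compiles under the new imports:

```scala
import org.mockito.Mockito.when
import org.scalatest.FunSuite
import org.scalatest.Matchers             // was org.scalatest.ShouldMatchers in 2.x
import org.scalatest.mockito.MockitoSugar // was org.scalatest.mock.MockitoSugar in 2.x

// Hypothetical suite: demonstrates that the mock[...] helper and the
// should-style matchers still compile after the two renames.
class RenamedImportsSketch extends FunSuite with MockitoSugar with Matchers {
  test("mocks and should-matchers under scalatest 3.x") {
    val list = mock[java.util.List[String]]
    when(list.size()).thenReturn(3)
    list.size() should be (3)
  }
}
```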
NettyBlockTransferServiceSuite.scala
@@ -23,13 +23,12 @@ import org.mockito.Mockito.mock
import org.scalatest._

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
-import org.apache.spark.internal.config._
import org.apache.spark.network.BlockDataManager

class NettyBlockTransferServiceSuite
  extends SparkFunSuite
  with BeforeAndAfterEach
-  with ShouldMatchers {
+  with Matchers {

  private var service0: NettyBlockTransferService = _
  private var service1: NettyBlockTransferService = _
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-2.2
@@ -145,7 +145,7 @@ scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
scala-parser-combinators_2.11-1.0.4.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.2.jar
+scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.0.0.jar
slf4j-api-1.7.16.jar
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-2.3
@@ -152,7 +152,7 @@ scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
scala-parser-combinators_2.11-1.0.4.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.2.jar
+scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.0.0.jar
slf4j-api-1.7.16.jar
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-2.4
@@ -152,7 +152,7 @@ scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
scala-parser-combinators_2.11-1.0.4.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.2.jar
+scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.0.0.jar
slf4j-api-1.7.16.jar
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-2.6
@@ -160,7 +160,7 @@ scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
scala-parser-combinators_2.11-1.0.4.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.2.jar
+scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.0.0.jar
slf4j-api-1.7.16.jar
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-2.7
@@ -161,7 +161,7 @@ scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
scala-parser-combinators_2.11-1.0.4.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.2.jar
+scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.0.0.jar
slf4j-api-1.7.16.jar
4 changes: 2 additions & 2 deletions pom.xml
@@ -708,7 +708,7 @@
      <dependency>
        <groupId>org.scalatest</groupId>
        <artifactId>scalatest_${scala.binary.version}</artifactId>
-       <version>2.2.6</version>
+       <version>3.0.1</version>
        <scope>test</scope>
      </dependency>
      <dependency>
@@ -720,7 +720,7 @@
      <dependency>
        <groupId>org.scalacheck</groupId>
        <artifactId>scalacheck_${scala.binary.version}</artifactId>
-       <version>1.12.5</version> <!-- 1.13.0 appears incompatible with scalatest 2.2.6 -->
+       <version>1.13.4</version>

HyukjinKwon (Member) commented on Dec 16, 2016:

Hi @jaceklaskowski, I just wonder if it is safe, given the comment beside it ("1.13.0 appears incompatible with scalatest 2.2.6"). I just read https://github.com/rickynils/scalacheck/issues/217, BTW.

jaceklaskowski (Contributor, Author) replied:

Thanks. The local build has passed, so I'm now waiting for Jenkins to say how it liked the change (or not).

Member replied:

Yes, I remember this didn't work. Jenkins runs SBT; are you checking Maven locally? I'd like to update, sure, but want to make sure we don't trip over whatever I found before.

We could consider updating to scalatest 3.0.1 as well, in any event. As a test-only dependency, it should be safe to update it more freely.

jaceklaskowski (Contributor, Author) replied:

Yes, I used Maven for the entire local build and sbt for this particular DAGSchedulerSuite (which is where it all started, actually).

I've got little experience with the Spark test suite, so I'm relying on Jenkins to do the heavy lifting and see how things may have changed since that comment.

jaceklaskowski (Contributor, Author) replied:

sbt.ForkMain$ForkError: sbt.ForkMain$ForkError: java.lang.IncompatibleClassChangeError: Implementing class

You were both indeed right. Let me dig into it a bit more...

        <scope>test</scope>
      </dependency>
      <dependency>
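The IncompatibleClassChangeError quoted in the thread is the usual symptom of a binary mismatch: ScalaTest's ScalaCheck integration is compiled against a particular scalacheck binary version, and scalatest 2.2.6 predates the interface changes in scalacheck 1.13.x, which is why the two version bumps in this pom.xml have to land together. A sketch (hypothetical suite, assuming scalatest 3.0.1 paired with scalacheck 1.13.4 on the classpath) of the property-check path that exercises that integration layer:

```scala
import org.scalatest.FunSuite
import org.scalatest.prop.GeneratorDrivenPropertyChecks

// Hypothetical suite: forAll runs through scalatest's ScalaCheck bridge,
// the layer that threw IncompatibleClassChangeError under mismatched versions.
class PropertyBridgeSketch extends FunSuite with GeneratorDrivenPropertyChecks {
  test("reversing a list twice yields the original") {
    forAll { (xs: List[Int]) =>
      assert(xs.reverse.reverse === xs)
    }
  }
}
```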
AnalysisSuite.scala
@@ -17,18 +17,18 @@

package org.apache.spark.sql.catalyst.analysis

-import org.scalatest.ShouldMatchers
+import org.scalatest.Matchers

-import org.apache.spark.sql.catalyst.{SimpleCatalystConf, TableIdentifier}
+import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.catalyst.plans.{Cross, Inner}
+import org.apache.spark.sql.catalyst.plans.Cross
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.types._


-class AnalysisSuite extends AnalysisTest with ShouldMatchers {
+class AnalysisSuite extends AnalysisTest with Matchers {
  import org.apache.spark.sql.catalyst.analysis.TestRelations._

  test("union project *") {
ExchangeCoordinatorSuite.scala
@@ -250,7 +250,7 @@ class ExchangeCoordinatorSuite extends SparkFunSuite with BeforeAndAfterAll {
  }

  def withSparkSession(
-      f: SparkSession => Unit,
+      f: SparkSession => Any,
      targetNumPostShufflePartitions: Int,
      minNumPostShufflePartitions: Option[Int]): Unit = {
    val sparkConf =
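A plausible reason for the widening, not spelled out in the diff: under scalatest 3.x, assert and matcher expressions return org.scalatest.Assertion instead of Unit, so a test body bound to a val is inferred as SparkSession => Assertion. That value conforms to SparkSession => Any (functions are covariant in their result type) but not to SparkSession => Unit. A self-contained sketch with hypothetical names:

```scala
import org.apache.spark.sql.SparkSession
import org.scalatest.Assertions._

object WidenedCallbackSketch {
  // Hypothetical helper mirroring the widened signature; the real method
  // also takes the partition-count parameters shown in the diff.
  def withSparkSession(f: SparkSession => Any): Unit = {
    val spark = SparkSession.builder().master("local[2]").getOrCreate()
    try f(spark) finally spark.stop()
  }

  // Inferred as SparkSession => Assertion under scalatest 3.x, because
  // assert(...) now returns org.scalatest.Assertion rather than Unit.
  val check = (spark: SparkSession) => {
    assert(spark.range(10).count() == 10)
  }

  def main(args: Array[String]): Unit =
    withSparkSession(check) // conforms to => Any; would be rejected by => Unit
}
```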
BatchedWriteAheadLogSuite.scala
@@ -480,8 +480,8 @@ class BatchedWriteAheadLogSuite extends CommonWriteAheadLogTests(
  }

  // we make the write requests in separate threads so that we don't block the test thread
-  private def writeAsync(wal: WriteAheadLog, event: String, time: Long): Promise[Unit] = {
-    val p = Promise[Unit]()
+  private def writeAsync(wal: WriteAheadLog, event: String, time: Long): Promise[Any] = {
+    val p = Promise[Any]()
    p.completeWith(Future {
      val v = wal.write(event, time)
      assert(v === walHandle)
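The Promise widening looks like the same scalatest 3.x ripple (an assumption; the diff does not say): the Future body now ends in an assertion, so its natural type is Future[Assertion], and Promise[Any].completeWith accepts it without discarding the assertion's result the way Promise[Unit] would force. A stripped-down sketch with a stand-in for the real WriteAheadLog:

```scala
import scala.concurrent.{Await, Future, Promise}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import org.scalatest.Assertions._

object PromiseWideningSketch {
  // Stand-in for wal.write(event, time); the real call returns a segment handle.
  def fakeWrite(event: String, time: Long): Long = time

  def writeAsync(event: String, time: Long, expected: Long): Promise[Any] = {
    val p = Promise[Any]()
    p.completeWith(Future {
      val v = fakeWrite(event, time)
      assert(v === expected) // an Assertion, so this block is a Future[Assertion]
    })
    p
  }

  def main(args: Array[String]): Unit =
    Await.result(writeAsync("event", 42L, expected = 42L).future, 5.seconds)
}
```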