From c6c0b7a2c102de94886e3c35d17bcb11e73488b0 Mon Sep 17 00:00:00 2001
From: William Benton
Date: Mon, 7 Jul 2014 16:00:58 -0500
Subject: [PATCH] Ensure language.postfixOps is in scope where used

Previously, language.postfixOps was imported at the top level of each
file, which produced compiler warnings since the import was not visible
inside the classes that use postfix operations.  This commit moves the
import into those classes to suppress the warnings.
---
 core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala | 3 ++-
 .../test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala | 2 +-
 core/src/test/scala/org/apache/spark/ui/UISuite.scala          | 2 +-
 .../org/apache/spark/streaming/NetworkReceiverSuite.scala      | 2 +-
 .../org/apache/spark/streaming/StreamingListenerSuite.scala    | 2 +-
 .../src/test/scala/org/apache/spark/streaming/UISuite.scala    | 2 +-
 6 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
index 4bc4346c0a288..226400945953b 100644
--- a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -21,7 +21,6 @@ import java.lang.ref.WeakReference
 
 import scala.collection.mutable.{HashSet, SynchronizedSet}
 import scala.language.existentials
-import scala.language.postfixOps
 import scala.util.Random
 
 import org.scalatest.{BeforeAndAfter, FunSuite}
@@ -47,6 +46,8 @@ import org.apache.spark.storage.ShuffleIndexBlockId
 abstract class ContextCleanerSuiteBase(val shuffleManager: Class[_] = classOf[HashShuffleManager])
   extends FunSuite with BeforeAndAfter with LocalSparkContext
 {
+  import scala.language.postfixOps
+
   implicit val defaultTimeout = timeout(10000 millis)
   val conf = new SparkConf()
     .setMaster("local[2]")
diff --git a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
index 28197657e9bad..e4b6d1c18b4f6 100644
--- a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
@@ -22,7 +22,6 @@ import java.util.concurrent.Semaphore
 import scala.concurrent.{Await, TimeoutException}
 import scala.concurrent.duration.Duration
 import scala.concurrent.ExecutionContext.Implicits.global
-import scala.language.postfixOps
 
 import org.scalatest.{BeforeAndAfterAll, FunSuite}
 import org.scalatest.concurrent.Timeouts
@@ -32,6 +31,7 @@ import org.apache.spark.SparkContext._
 import org.apache.spark.{SparkContext, SparkException, LocalSparkContext}
 
 class AsyncRDDActionsSuite extends FunSuite with BeforeAndAfterAll with Timeouts {
+  import scala.language.postfixOps
 
   @transient private var sc: SparkContext = _
 
diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index 038746d2eda4b..2a77642f6f844 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -21,7 +21,6 @@ import java.net.ServerSocket
 import javax.servlet.http.HttpServletRequest
 
 import scala.io.Source
-import scala.language.postfixOps
 import scala.util.{Failure, Success, Try}
 
 import org.eclipse.jetty.server.Server
@@ -35,6 +34,7 @@ import org.apache.spark.LocalSparkContext._
 import scala.xml.Node
 
 class UISuite extends FunSuite {
+  import scala.language.postfixOps
 
   ignore("basic ui visibility") {
     withSpark(new SparkContext("local", "test")) { sc =>
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/NetworkReceiverSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/NetworkReceiverSuite.scala
index f4e11f975de94..cf52475778c9e 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/NetworkReceiverSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/NetworkReceiverSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.streaming
 import java.nio.ByteBuffer
 
 import scala.collection.mutable.ArrayBuffer
-import scala.language.postfixOps
 
 import org.apache.spark.SparkConf
 import org.apache.spark.storage.{StorageLevel, StreamBlockId}
@@ -32,6 +31,7 @@ import org.scalatest.time.SpanSugar._
 
 /** Testsuite for testing the network receiver behavior */
 class NetworkReceiverSuite extends FunSuite with Timeouts {
+  import scala.language.postfixOps
 
   test("network receiver life cycle") {
 
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala
index 2861f5335ae36..cc8ae3d0c5adf 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.streaming
 import scala.collection.mutable.ArrayBuffer
 import scala.concurrent.Future
 import scala.concurrent.ExecutionContext.Implicits.global
-import scala.language.postfixOps
 
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming.dstream.DStream
@@ -33,6 +32,7 @@ import org.scalatest.time.SpanSugar._
 import org.apache.spark.Logging
 
 class StreamingListenerSuite extends TestSuiteBase with Matchers {
+  import scala.language.postfixOps
 
   val input = (1 to 4).map(Seq(_)).toSeq
   val operation = (d: DStream[Int]) => d.map(x => x)
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/UISuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/UISuite.scala
index 2a0db7564915d..92e6bb71336f0 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/UISuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/UISuite.scala
@@ -18,13 +18,13 @@
 package org.apache.spark.streaming
 
 import scala.io.Source
-import scala.language.postfixOps
 
 import org.scalatest.FunSuite
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
 
 class UISuite extends FunSuite {
+  import scala.language.postfixOps
 
   // Ignored: See SPARK-1530
   ignore("streaming tab in spark UI") {
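
Note (not part of the patch): the warnings being silenced come from Scala's postfix operator notation, e.g. 10000 millis in the hunks above. Since Scala 2.10, using an operator in postfix position without scala.language.postfixOps in scope at the call site produces a feature warning (shown in detail when compiling with -feature). Below is a minimal standalone sketch of the same pattern, with hypothetical names and scala.concurrent.duration standing in for the test DSL used in these suites:

    // Hypothetical example: the feature import lives inside the object that
    // uses postfix syntax, mirroring the approach taken in this patch.
    import scala.concurrent.duration._

    object PostfixExample {
      import scala.language.postfixOps  // in scope at the postfix call site

      // Postfix notation: no dot, no parentheses. Without the import above,
      // scalac 2.10+ reports a postfixOps feature warning for this line.
      val timeout: FiniteDuration = 500 millis

      // The dotted form needs no feature import at all.
      val alsoTimeout: FiniteDuration = 500.millis
    }

The dotted form (or enabling -language:postfixOps for the whole build) would also avoid the warning; scoping the import to each suite is the smallest change that keeps the existing postfix style.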