/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.streaming

import java.io.NotSerializableException

import org.scalatest.{BeforeAndAfterAll, FunSuite}

import org.apache.spark.{HashPartitioner, SparkContext, SparkException}
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.util.ReturnStatementInClosureException

/**
 * Test that closures passed to DStream operations are actually cleaned.
 */
class DStreamClosureSuite extends FunSuite with BeforeAndAfterAll {
  private var ssc: StreamingContext = null

  override def beforeAll(): Unit = {
    val sc = new SparkContext("local", "test")
    ssc = new StreamingContext(sc, Seconds(1))
  }

  override def afterAll(): Unit = {
    ssc.stop(stopSparkContext = true)
    ssc = null
  }

  test("user provided closures are actually cleaned") {
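    // DummyInputDStream is a no-op InputDStream defined elsewhere in the
    // streaming test sources; it exists only to give us a DStream to call
    // operations on.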
    val dstream = new DummyInputDStream(ssc)
    val pairDstream = dstream.map { i => (i, i) }
    // DStream
    testMap(dstream)
    testFlatMap(dstream)
    testFilter(dstream)
    testMapPartitions(dstream)
    testReduce(dstream)
    testForeach(dstream)
    testForeachRDD(dstream)
    testTransform(dstream)
    testTransformWith(dstream)
    testReduceByWindow(dstream)
    // PairDStreamFunctions
    testReduceByKey(pairDstream)
    testCombineByKey(pairDstream)
    testReduceByKeyAndWindow(pairDstream)
    testUpdateStateByKey(pairDstream)
    testMapValues(pairDstream)
    testFlatMapValues(pairDstream)
    // StreamingContext
    testTransform2(ssc, dstream)
  }

  /**
   * Verify that the expected exception is thrown.
   *
   * We use return statements as an indication that a closure is actually being cleaned.
   * We expect the closure cleaner to find the return statements in the user-provided closures.
   */
  private def expectCorrectException(body: => Unit): Unit = {
    try {
      body
    } catch {
      case rse: ReturnStatementInClosureException => // Success!
      case e @ (_: NotSerializableException | _: SparkException) =>
        throw new TestException(
          s"Expected ReturnStatementInClosureException, but got $e.\n" +
          "This means the closure provided by the user is not actually cleaned.")
    }
  }
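
  // How the detection works (a sketch of ClosureCleaner's behavior, not something
  // this suite asserts directly): a `return` inside a Scala function literal
  // compiles to throwing scala.runtime.NonLocalReturnControl. Operations that
  // clean their closures run them through the closure cleaner on the driver,
  // whose bytecode scan spots that construct and fails fast with
  // ReturnStatementInClosureException, e.g.
  //
  //   ds.map { _ => return; 1 }  // throws during cleaning, before any job runs
  //
  // If an operation skips cleaning, the failure surfaces later instead, typically
  // as a NotSerializableException once the closure (still referencing its
  // enclosing scope) is serialized, which expectCorrectException reports as a
  // test failure.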

  // DStream operations
  private def testMap(ds: DStream[Int]): Unit = expectCorrectException {
    ds.map { _ => return; 1 }
  }
  private def testFlatMap(ds: DStream[Int]): Unit = expectCorrectException {
    ds.flatMap { _ => return; Seq.empty }
  }
  private def testFilter(ds: DStream[Int]): Unit = expectCorrectException {
    ds.filter { _ => return; true }
  }
  private def testMapPartitions(ds: DStream[Int]): Unit = expectCorrectException {
    ds.mapPartitions { _ => return; Seq.empty.toIterator }
  }
  private def testReduce(ds: DStream[Int]): Unit = expectCorrectException {
    ds.reduce { case (_, _) => return; 1 }
  }
  private def testForeach(ds: DStream[Int]): Unit = {
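    // DStream#foreach is the deprecated alias of foreachRDD; both overloads
    // (with and without a Time argument) should clean their closures.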
    val foreachF1 = (rdd: RDD[Int], t: Time) => return
    val foreachF2 = (rdd: RDD[Int]) => return
    expectCorrectException { ds.foreach(foreachF1) }
    expectCorrectException { ds.foreach(foreachF2) }
  }
  private def testForeachRDD(ds: DStream[Int]): Unit = {
    val foreachRDDF1 = (rdd: RDD[Int], t: Time) => return
    val foreachRDDF2 = (rdd: RDD[Int]) => return
    expectCorrectException { ds.foreachRDD(foreachRDDF1) }
    expectCorrectException { ds.foreachRDD(foreachRDDF2) }
  }
  private def testTransform(ds: DStream[Int]): Unit = {
    val transformF1 = (rdd: RDD[Int]) => { return; rdd }
    val transformF2 = (rdd: RDD[Int], time: Time) => { return; rdd }
    expectCorrectException { ds.transform(transformF1) }
    expectCorrectException { ds.transform(transformF2) }
  }
  private def testTransformWith(ds: DStream[Int]): Unit = {
    val transformF1 = (rdd1: RDD[Int], rdd2: RDD[Int]) => { return; rdd1 }
    val transformF2 = (rdd1: RDD[Int], rdd2: RDD[Int], time: Time) => { return; rdd2 }
    expectCorrectException { ds.transformWith(ds, transformF1) }
    expectCorrectException { ds.transformWith(ds, transformF2) }
  }
  private def testReduceByWindow(ds: DStream[Int]): Unit = {
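    // Covers both overloads: associative reduce only, and reduce plus an
    // inverse reduce function for incremental window computation.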
    val reduceF = (_: Int, _: Int) => { return; 1 }
    expectCorrectException { ds.reduceByWindow(reduceF, Seconds(1), Seconds(2)) }
    expectCorrectException { ds.reduceByWindow(reduceF, reduceF, Seconds(1), Seconds(2)) }
  }

  // PairDStreamFunctions operations
  private def testReduceByKey(ds: DStream[(Int, Int)]): Unit = {
    val reduceF = (_: Int, _: Int) => { return; 1 }
    expectCorrectException { ds.reduceByKey(reduceF) }
    expectCorrectException { ds.reduceByKey(reduceF, 5) }
    expectCorrectException { ds.reduceByKey(reduceF, new HashPartitioner(5)) }
  }
  private def testCombineByKey(ds: DStream[(Int, Int)]): Unit = {
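    // The three closures are, in order, createCombiner, mergeValue, and
    // mergeCombiners; all three should be cleaned.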
    expectCorrectException {
      ds.combineByKey[Int](
        { _: Int => return; 1 },
        { case (_: Int, _: Int) => return; 1 },
        { case (_: Int, _: Int) => return; 1 },
        new HashPartitioner(5)
      )
    }
  }
  private def testReduceByKeyAndWindow(ds: DStream[(Int, Int)]): Unit = {
    val reduceF = (_: Int, _: Int) => { return; 1 }
    val filterF = (_: (Int, Int)) => { return; false }
    expectCorrectException { ds.reduceByKeyAndWindow(reduceF, Seconds(1)) }
    expectCorrectException { ds.reduceByKeyAndWindow(reduceF, Seconds(1), Seconds(2)) }
    expectCorrectException { ds.reduceByKeyAndWindow(reduceF, Seconds(1), Seconds(2), 5) }
    expectCorrectException {
      ds.reduceByKeyAndWindow(reduceF, Seconds(1), Seconds(2), new HashPartitioner(5))
    }
    expectCorrectException { ds.reduceByKeyAndWindow(reduceF, reduceF, Seconds(2)) }
    expectCorrectException {
      ds.reduceByKeyAndWindow(
        reduceF, reduceF, Seconds(2), Seconds(3), new HashPartitioner(5), filterF)
    }
  }
  private def testUpdateStateByKey(ds: DStream[(Int, Int)]): Unit = {
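    // updateF1 exercises the per-key overloads; updateF2 exercises the
    // iterator-based overloads, whose Boolean argument is rememberPartitioner.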
    val updateF1 = (_: Seq[Int], _: Option[Int]) => { return; Some(1) }
    val updateF2 = (_: Iterator[(Int, Seq[Int], Option[Int])]) => { return; Seq((1, 1)).toIterator }
    val initialRDD = ds.ssc.sparkContext.emptyRDD[Int].map { i => (i, i) }
    expectCorrectException { ds.updateStateByKey(updateF1) }
    expectCorrectException { ds.updateStateByKey(updateF1, 5) }
    expectCorrectException { ds.updateStateByKey(updateF1, new HashPartitioner(5)) }
    expectCorrectException {
      ds.updateStateByKey(updateF1, new HashPartitioner(5), initialRDD)
    }
    expectCorrectException {
      ds.updateStateByKey(updateF2, new HashPartitioner(5), true)
    }
    expectCorrectException {
      ds.updateStateByKey(updateF2, new HashPartitioner(5), true, initialRDD)
    }
  }
  private def testMapValues(ds: DStream[(Int, Int)]): Unit = expectCorrectException {
    ds.mapValues { _ => return; 1 }
  }
  private def testFlatMapValues(ds: DStream[(Int, Int)]): Unit = expectCorrectException {
    ds.flatMapValues { _ => return; Seq.empty }
  }

  // StreamingContext operations
  private def testTransform2(ssc: StreamingContext, ds: DStream[Int]): Unit = {
    val transformF = (rdds: Seq[RDD[_]], time: Time) => { return; ssc.sparkContext.emptyRDD[Int] }
    expectCorrectException { ssc.transform(Seq(ds), transformF) }
  }

}
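
// To run just this suite (assuming Spark's standard sbt build; the exact
// command name may differ across sbt versions):
//   build/sbt "streaming/test-only org.apache.spark.streaming.DStreamClosureSuite"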