
Commit 75d795c

change 'NetworkInputDStream' to 'ReceiverInputDStream' && change 'ReceiverInputTracker' to 'ReceiverTracker'
1 parent 90a6a46 commit 75d795c

File tree

6 files changed: +9 -9 lines changed

examples/src/main/scala/org/apache/spark/examples/streaming/StatefulNetworkWordCount.scala

Lines changed: 1 addition & 1 deletion
@@ -64,7 +64,7 @@ object StatefulNetworkWordCount {
     // Initial RDD input to updateStateByKey
     val initialRDD = ssc.sparkContext.parallelize(List(("hello", 1), ("world", 1)))
 
-    // Create a NetworkInputDStream on target ip:port and count the
+    // Create a ReceiverInputDStream on target ip:port and count the
     // words in input stream of \n delimited test (eg. generated by 'nc')
     val lines = ssc.socketTextStream(args(0), args(1).toInt)
     val words = lines.flatMap(_.split(" "))
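
For context on the renamed comment: socketTextStream is one of the StreamingContext methods whose result is the renamed type, here a ReceiverInputDStream[String]. A minimal, self-contained sketch of the same word-count pattern, assuming a local master and text fed to port 9999 (e.g. with 'nc -lk 9999'):

import org.apache.spark.streaming.{Seconds, StreamingContext}

object SocketWordCountSketch {
  def main(args: Array[String]) {
    // local[2]: one thread runs the receiver, the other processes batches.
    val ssc = new StreamingContext("local[2]", "SocketWordCountSketch", Seconds(1))

    // socketTextStream returns a ReceiverInputDStream[String],
    // the type the updated comment now refers to.
    val lines = ssc.socketTextStream("localhost", 9999)
    val wordCounts = lines.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)
    wordCounts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}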

examples/src/main/scala/org/apache/spark/examples/streaming/clickstream/PageViewStream.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ object PageViewStream {
5050
val ssc = new StreamingContext("local[2]", "PageViewStream", Seconds(1),
5151
System.getenv("SPARK_HOME"), StreamingContext.jarOfClass(this.getClass).toSeq)
5252

53-
// Create a NetworkInputDStream on target host:port and convert each line to a PageView
53+
// Create a ReceiverInputDStream on target host:port and convert each line to a PageView
5454
val pageViews = ssc.socketTextStream(host, port)
5555
.flatMap(_.split("\n"))
5656
.map(PageView.fromString(_))
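
The PageView pipeline is the same pattern with a parse step: the ReceiverInputDStream of raw lines is mapped into typed records. A sketch of that parse step, with a simplified stand-in for the example's real PageView (the field set and line format here are assumptions for illustration, not the example's actual ones):

// Simplified stand-in for the example's PageView; the real class has more fields.
case class PageView(url: String, status: Int)

object PageView {
  // Assumed one-record-per-line, tab-separated format, purely for illustration.
  def fromString(in: String): PageView = {
    val parts = in.split("\t")
    PageView(parts(0), parts(1).toInt)
  }
}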

streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala

Lines changed: 1 addition & 1 deletion
@@ -187,7 +187,7 @@ class StreamingContext private[streaming] (
   /**
    * Set each DStreams in this context to remember RDDs it generated in the last given duration.
    * DStreams remember RDDs only for a limited duration of time and releases them for garbage
-   * collection. This method allows the developer to specify how to long to remember the RDDs (
+   * collection. This method allows the developer to specify how long to remember the RDDs (
    * if the developer wishes to query old data outside the DStream computation).
    * @param duration Minimum duration that each DStream should remember its RDDs
    */
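
The corrected sentence documents StreamingContext.remember. A minimal usage sketch, for an application that wants to query generated RDDs outside the normal DStream computation after they would otherwise have been released:

import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}

val ssc = new StreamingContext("local[2]", "RememberSketch", Seconds(1))

// Keep each DStream's generated RDDs for at least five minutes so they can
// still be queried outside the DStream computation, instead of being
// released for garbage collection as soon as they are no longer needed.
ssc.remember(Minutes(5))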

streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala

Lines changed: 1 addition & 1 deletion
@@ -479,7 +479,7 @@ class JavaStreamingContext(val ssc: StreamingContext) extends Closeable {
   /**
    * Sets each DStreams in this context to remember RDDs it generated in the last given duration.
    * DStreams remember RDDs only for a limited duration of duration and releases them for garbage
-   * collection. This method allows the developer to specify how to long to remember the RDDs (
+   * collection. This method allows the developer to specify how long to remember the RDDs (
    * if the developer wishes to query old data outside the DStream computation).
    * @param duration Minimum duration that each DStream should remember its RDDs
    */

streaming/src/main/scala/org/apache/spark/streaming/dstream/ReceiverInputDStream.scala

Lines changed: 4 additions & 4 deletions
@@ -29,7 +29,7 @@ import org.apache.spark.streaming.scheduler.ReceivedBlockInfo
 /**
  * Abstract class for defining any [[org.apache.spark.streaming.dstream.InputDStream]]
  * that has to start a receiver on worker nodes to receive external data.
- * Specific implementations of NetworkInputDStream must
+ * Specific implementations of ReceiverInputDStream must
  * define `the getReceiver()` function that gets the receiver object of type
  * [[org.apache.spark.streaming.receiver.Receiver]] that will be sent
  * to the workers to receive data.
@@ -39,17 +39,17 @@ import org.apache.spark.streaming.scheduler.ReceivedBlockInfo
 abstract class ReceiverInputDStream[T: ClassTag](@transient ssc_ : StreamingContext)
   extends InputDStream[T](ssc_) {
 
-  /** This is an unique identifier for the network input stream. */
+  /** This is an unique identifier for the receiver input stream. */
   val id = ssc.getNewReceiverStreamId()
 
   /**
    * Gets the receiver object that will be sent to the worker nodes
    * to receive data. This method needs to defined by any specific implementation
-   * of a NetworkInputDStream.
+   * of a ReceiverInputDStream.
    */
   def getReceiver(): Receiver[T]
 
-  // Nothing to start or stop as both taken care of by the ReceiverInputTracker.
+  // Nothing to start or stop as both taken care of by the ReceiverTracker.
  def start() {}
 
  def stop() {}
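
The renamed Scaladoc describes the contract a concrete subclass fulfils: supply the Receiver via getReceiver(). Most user code obtains a ReceiverInputDStream indirectly through StreamingContext.receiverStream, which wraps a custom Receiver. A sketch of that path, using a hypothetical ConstantReceiver written purely for illustration:

import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver

// Hypothetical receiver that repeatedly stores a fixed string; it exists
// only to show the Receiver contract that getReceiver() must satisfy.
class ConstantReceiver(value: String)
  extends Receiver[String](StorageLevel.MEMORY_ONLY) {

  def onStart() {
    new Thread("ConstantReceiver") {
      override def run() {
        while (!isStopped()) {
          store(value)      // hand data to Spark's block manager
          Thread.sleep(100)
        }
      }
    }.start()
  }

  def onStop() {}           // the receiving thread exits via isStopped()
}

val ssc = new StreamingContext("local[2]", "ReceiverSketch", Seconds(1))
// receiverStream wraps the receiver in a ReceiverInputDStream[String].
val constants = ssc.receiverStream(new ConstantReceiver("ping"))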

streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala

Lines changed: 1 addition & 1 deletion
@@ -46,7 +46,7 @@ private[streaming] case class DeregisterReceiver(streamId: Int, msg: String, err
   extends ReceiverTrackerMessage
 
 /**
- * This class manages the execution of the receivers of NetworkInputDStreams. Instance of
+ * This class manages the execution of the receivers of ReceiverInputDStreams. Instance of
  * this class must be created after all input streams have been added and StreamingContext.start()
  * has been called because it needs the final set of input streams at the time of instantiation.
  *
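
ReceiverTracker itself is private[streaming], so it never appears in user code, but the instantiation constraint described in this Scaladoc is user-visible: all receiver-backed input streams must be registered before StreamingContext.start() is called. A sketch of that user-facing side of the contract:

import org.apache.spark.streaming.{Seconds, StreamingContext}

val ssc = new StreamingContext("local[2]", "TrackerSketch", Seconds(1))

// All receiver-backed input streams must be registered before start();
// the scheduler builds its ReceiverTracker from this final set of streams.
val lines = ssc.socketTextStream("localhost", 9999)
lines.print()

ssc.start()            // ReceiverTracker is created internally at this point
ssc.awaitTermination()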
