
Commit cea2ddc

add docs
1 parent b5d621b commit cea2ddc

2 files changed: 4 additions & 0 deletions

sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousDataSourceRDDIter.scala

Lines changed: 1 addition & 0 deletions
@@ -52,6 +52,7 @@ class ContinuousDataSourceRDD(
   }
 
   override def compute(split: Partition, context: TaskContext): Iterator[UnsafeRow] = {
+    // If attempt number isn't 0, this is a task retry, which we don't support.
     if (context.attemptNumber() != 0) {
       throw new ContinuousTaskRetryException()
     }
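
The guard that the new comment documents can be read in isolation as the sketch below. This is only an illustration of the pattern, not code from this commit: the RetryGuard object and assertFirstAttempt helper are hypothetical, and only Spark itself is assumed on the classpath.

import org.apache.spark.TaskContext
import org.apache.spark.sql.execution.streaming.continuous.ContinuousTaskRetryException

object RetryGuard {
  // attemptNumber() is 0 on a task's first attempt and increases on each retry,
  // so any nonzero value means the scheduler is re-running the task.
  def assertFirstAttempt(context: TaskContext): Unit = {
    if (context.attemptNumber() != 0) {
      throw new ContinuousTaskRetryException()
    }
  }
}

In ContinuousDataSourceRDD the check sits at the top of compute, so a retried task fails before any rows are produced.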

sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousTaskRetryException.scala

Lines changed: 3 additions & 0 deletions
@@ -19,5 +19,8 @@ package org.apache.spark.sql.execution.streaming.continuous
 
 import org.apache.spark.SparkException
 
+/**
+ * An exception thrown when a continuous processing task runs with a nonzero attempt ID.
+ */
 class ContinuousTaskRetryException
   extends SparkException("Continuous execution does not support task retry", null)
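
Because the newly documented class extends SparkException, callers can single it out with an ordinary pattern match. The FailureClassifier object below is a hypothetical sketch of that, not part of Spark.

import org.apache.spark.SparkException
import org.apache.spark.sql.execution.streaming.continuous.ContinuousTaskRetryException

object FailureClassifier {
  def classify(e: Throwable): String = e match {
    // The exception documented above: a continuous task started with a nonzero attempt number.
    case _: ContinuousTaskRetryException => "continuous task retry (unsupported)"
    // Any other Spark-level failure.
    case _: SparkException => "other Spark failure"
    case _ => "non-Spark failure"
  }
}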
