-
Notifications
You must be signed in to change notification settings - Fork 28.9k
[SPARK-26379][SS] Fix issue on adding current_timestamp/current_date to streaming query #23609
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
afb0dc4
8f30a75
d09845b
687c3e4
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -508,12 +508,20 @@ class MicroBatchExecution( | |
| cd.dataType, cd.timeZoneId) | ||
| } | ||
|
|
||
| // Pre-resolve new attributes to ensure all attributes are resolved before | ||
| // accessing schema of logical plan. Note that it only leverages the information | ||
| // of attributes, so we don't need to concern about the value of literals. | ||
|
|
||
| val newAttrPlanPreResolvedForSchema = newAttributePlan transformAllExpressions { | ||
|
||
| case cbt: CurrentBatchTimestamp => cbt.toLiteral | ||
| } | ||
|
|
||
| val triggerLogicalPlan = sink match { | ||
| case _: Sink => newAttributePlan | ||
| case s: StreamingWriteSupportProvider => | ||
| val writer = s.createStreamingWriteSupport( | ||
| s"$runId", | ||
| newAttributePlan.schema, | ||
| newAttrPlanPreResolvedForSchema.schema, | ||
| outputMode, | ||
| new DataSourceOptions(extraOptions.asJava)) | ||
| WriteToDataSourceV2(new MicroBatchWrite(currentBatchId, writer), newAttributePlan) | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -34,6 +34,7 @@ import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart} | |
| import org.apache.spark.sql._ | ||
| import org.apache.spark.sql.catalyst.plans.logical.Range | ||
| import org.apache.spark.sql.catalyst.streaming.InternalOutputModes | ||
| import org.apache.spark.sql.catalyst.util.DateTimeUtils | ||
| import org.apache.spark.sql.execution.command.ExplainCommand | ||
| import org.apache.spark.sql.execution.streaming._ | ||
| import org.apache.spark.sql.execution.streaming.sources.ContinuousMemoryStream | ||
|
|
@@ -1079,6 +1080,51 @@ class StreamSuite extends StreamTest { | |
| assert(query.exception.isEmpty) | ||
| } | ||
| } | ||
|
|
||
| test("SPARK-26379 Structured Streaming - Exception on adding current_timestamp / current_date" + | ||
| " to Dataset - use v2 sink") { | ||
| testCurrentTimestampOnStreamingQuery(useV2Sink = true) | ||
| } | ||
|
|
||
| test("SPARK-26379 Structured Streaming - Exception on adding current_timestamp / current_date" + | ||
| " to Dataset - use v1 sink") { | ||
| testCurrentTimestampOnStreamingQuery(useV2Sink = false) | ||
| } | ||
|
|
||
| private def testCurrentTimestampOnStreamingQuery(useV2Sink: Boolean): Unit = { | ||
| val input = MemoryStream[Int] | ||
| val df = input.toDS() | ||
| .withColumn("cur_timestamp", lit(current_timestamp())) | ||
| .withColumn("cur_date", lit(current_date())) | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Currently, this hides |
||
|
|
||
| def assertBatchOutputAndUpdateLastTimestamp( | ||
| rows: Seq[Row], | ||
| curTimestamp: Long, | ||
| curDate: Int, | ||
| expectedValue: Int): Long = { | ||
| assert(rows.size === 1) | ||
| val row = rows.head | ||
| assert(row.getInt(0) === expectedValue) | ||
| assert(row.getTimestamp(1).getTime >= curTimestamp) | ||
| val days = DateTimeUtils.millisToDays(row.getDate(2).getTime) | ||
| assert(days == curDate || days == curDate + 1) | ||
| row.getTimestamp(1).getTime | ||
| } | ||
|
|
||
| var lastTimestamp = System.currentTimeMillis() | ||
| val currentDate = DateTimeUtils.millisToDays(lastTimestamp) | ||
| testStream(df, useV2Sink = useV2Sink) ( | ||
| AddData(input, 1), | ||
| CheckLastBatch { rows: Seq[Row] => | ||
| lastTimestamp = assertBatchOutputAndUpdateLastTimestamp(rows, lastTimestamp, currentDate, 1) | ||
| }, | ||
| Execute { _ => Thread.sleep(1000) }, | ||
| AddData(input, 2), | ||
| CheckLastBatch { rows: Seq[Row] => | ||
| lastTimestamp = assertBatchOutputAndUpdateLastTimestamp(rows, lastTimestamp, currentDate, 2) | ||
| } | ||
| ) | ||
| } | ||
| } | ||
|
|
||
| abstract class FakeSource extends StreamSourceProvider { | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The root cause is that
`CurrentBatchTimestamp` is a `TimeZoneAwareExpression`, which is unresolved without a TimeZoneId.