Commit 3617606

wakun authored and GitHub Enterprise committed
[CARMEL-6355][FOLLOWUP] Auto update table statistics based on write metrics (#1142)
1 parent d1c2c3a · commit 3617606

1 file changed: +7 -3 lines changed
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/BasicWriteStatsTracker.scala

Lines changed: 7 additions & 3 deletions
@@ -185,8 +185,12 @@ object BasicWriteJobStatsTracker {
   }
 
   def getWriteStats(mode: SaveMode, metrics: Map[String, SQLMetric]): Option[WriteStats] = {
-    val numBytes = metrics.get(NUM_OUTPUT_BYTES_KEY).map(_.value).map(BigInt(_))
-    val numRows = metrics.get(NUM_OUTPUT_ROWS_KEY).map(_.value).map(BigInt(_))
-    numBytes.map(WriteStats(mode, _, numRows))
+    if (metrics.contains(NUM_OUTPUT_BYTES_KEY) && metrics.contains(NUM_OUTPUT_ROWS_KEY)) {
+      val numBytes = metrics.get(NUM_OUTPUT_BYTES_KEY).map(_.value).map(BigInt(_))
+      val numRows = metrics.get(NUM_OUTPUT_ROWS_KEY).map(_.value).map(BigInt(_))
+      numBytes.map(WriteStats(mode, _, numRows))
+    } else {
+      None
+    }
   }
 }
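
To make the behavioral change concrete, below is a minimal, self-contained Scala sketch of the guard this commit introduces. It is not the actual Spark code: SimpleMetric and SimpleWriteStats are hypothetical stand-ins for SQLMetric and WriteStats, the metric key strings are assumed for illustration, and the SaveMode parameter is dropped for brevity. It shows that, after the change, stats are produced only when both the byte and row metrics are present; a metrics map that reports bytes but not rows now yields None instead of partial statistics.

// Sketch of the guard added by this commit, using hypothetical stand-ins
// (SimpleMetric, SimpleWriteStats) rather than Spark's real classes.
object WriteStatsSketch {
  // Hypothetical stand-in for SQLMetric: just wraps a Long value.
  final case class SimpleMetric(value: Long)

  // Hypothetical stand-in for WriteStats: bytes required, rows optional.
  final case class SimpleWriteStats(numBytes: BigInt, numRows: Option[BigInt])

  // Assumed metric key names, for illustration only.
  val NUM_OUTPUT_BYTES_KEY = "numOutputBytes"
  val NUM_OUTPUT_ROWS_KEY  = "numOutputRows"

  // After the patch: stats are only produced when BOTH metrics are reported.
  def getWriteStats(metrics: Map[String, SimpleMetric]): Option[SimpleWriteStats] = {
    if (metrics.contains(NUM_OUTPUT_BYTES_KEY) && metrics.contains(NUM_OUTPUT_ROWS_KEY)) {
      val numBytes = metrics.get(NUM_OUTPUT_BYTES_KEY).map(m => BigInt(m.value))
      val numRows  = metrics.get(NUM_OUTPUT_ROWS_KEY).map(m => BigInt(m.value))
      numBytes.map(SimpleWriteStats(_, numRows))
    } else {
      None
    }
  }

  def main(args: Array[String]): Unit = {
    val complete = Map(
      NUM_OUTPUT_BYTES_KEY -> SimpleMetric(4096L),
      NUM_OUTPUT_ROWS_KEY  -> SimpleMetric(100L))
    val bytesOnly = Map(NUM_OUTPUT_BYTES_KEY -> SimpleMetric(4096L))

    println(getWriteStats(complete))  // Some(SimpleWriteStats(4096,Some(100)))
    println(getWriteStats(bytesOnly)) // None (before the patch: Some with numRows = None)
  }
}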
