
Commit 0355ce4

SPARK-31710: TIMESTAMP_SECONDS, TIMESTAMP_MILLISECONDS and TIMESTAMP_MICROSECONDS to timestamp transfer

1 parent 097d509

3 files changed: +111 −0 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
Lines changed: 3 additions & 0 deletions

@@ -424,6 +424,9 @@ object FunctionRegistry {
     expression[MakeInterval]("make_interval"),
     expression[DatePart]("date_part"),
     expression[Extract]("extract"),
+    expression[SecondsToTimestamp]("timestamp_seconds"),
+    expression[MilliSecondsToTimestamp]("timestamp_milliseconds"),
+    expression[MicroSecondsToTimestamp]("timestamp_microseconds"),

     // collection functions
     expression[CreateArray]("array"),

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
Lines changed: 86 additions & 0 deletions

@@ -401,6 +401,92 @@ case class DayOfYear(child: Expression) extends UnaryExpression with ImplicitCas
   }
 }

+@ExpressionDescription(
+  usage = "_FUNC_(date) - Returns timestamp from seconds.",
+  examples = """
+    Examples:
+      > SELECT _FUNC_(1230219000);
+       "2008-12-25 07:30:00.0"
+  """,
+  group = "datetime_funcs",
+  since = "3.1.0")
+case class SecondsToTimestamp(child: Expression)
+  extends NumberToTimestampBase {
+
+  override def upScaleFactor: SQLTimestamp = MICROS_PER_SECOND
+
+  override def prettyName: String = "timestamp_seconds"
+}
+
+@ExpressionDescription(
+  usage = "_FUNC_(date) - Returns timestamp from milliseconds.",
+  examples = """
+    Examples:
+      > SELECT _FUNC_(1230219000000);
+       "2008-12-25 07:30:00.0"
+  """,
+  group = "datetime_funcs",
+  since = "3.1.0")
+case class MilliSecondsToTimestamp(child: Expression)
+  extends NumberToTimestampBase {
+
+  override def upScaleFactor: SQLTimestamp = MICROS_PER_MILLIS
+
+  override def prettyName: String = "timestamp_milliseconds"
+}
+
+@ExpressionDescription(
+  usage = "_FUNC_(date) - Returns timestamp from microseconds.",
+  examples = """
+    Examples:
+      > SELECT _FUNC_(1230219000000000);
+       "2008-12-25 07:30:00.0"
+  """,
+  group = "datetime_funcs",
+  since = "3.1.0")
+case class MicroSecondsToTimestamp(child: Expression)
+  extends NumberToTimestampBase {
+
+  override def upScaleFactor: SQLTimestamp = 1L
+
+  override def prettyName: String = "timestamp_microseconds"
+}
+
+abstract class NumberToTimestampBase extends UnaryExpression
+  with ImplicitCastInputTypes{
+
+  protected def upScaleFactor: Long
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(LongType, IntegerType)
+
+  override def dataType: DataType = TimestampType
+
+  override def eval(input: InternalRow): Any = {
+    val t = child.eval(input)
+    if (t == null) {
+      null
+    } else {
+      child.dataType match {
+        case IntegerType =>
+          Math.multiplyExact(t.asInstanceOf[Int].toLong, upScaleFactor)
+        case LongType =>
+          Math.multiplyExact(t.asInstanceOf[Long], upScaleFactor)
+      }
+    }
+  }
+
+  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
+    child.dataType match {
+      case IntegerType =>
+        defineCodeGen(ctx, ev, _ => s"java.lang.Math.multiplyExact(" +
+          s"${ev.value.asInstanceOf[Integer].toLong}, ${upScaleFactor})")
+      case LongType =>
+        defineCodeGen(ctx, ev, _ => s"java.lang.Math.multiplyExact(" +
+          s"${ev.value.asInstanceOf[Long]}, ${upScaleFactor})")
+    }
+  }
+}
+
 @ExpressionDescription(
   usage = "_FUNC_(date) - Returns the year component of the date/timestamp.",
   examples = """

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
Lines changed: 22 additions & 0 deletions

@@ -3495,6 +3495,28 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
     assert(df4.schema.head.name === "randn(1)")
     checkIfSeedExistsInExplain(df2)
   }
+
+  test("SPARK-31710: " +
+    "TIMESTAMP_SECONDS, TIMESTAMP_MILLISECONDS and TIMESTAMP_MICROSECONDS to timestamp transfer") {
+    val df1 = sql("select TIMESTAMP_SECONDS(1230219000) as timestamp")
+    checkAnswer(df1, Row(Timestamp.valueOf("2008-12-25 07:30:00.0")))
+
+    val df2 = sql("select TIMESTAMP_MILLISECONDS(1230219000000) as timestamp")
+    checkAnswer(df2, Row(Timestamp.valueOf("2008-12-25 07:30:00.0")))
+
+    val df3 = sql("select TIMESTAMP_MICROSECONDS(1230219000000000) as timestamp")
+    checkAnswer(df3, Row(Timestamp.valueOf("2008-12-25 07:30:00.0")))
+
+    val df4 = sql("select TIMESTAMP_SECONDS(-1230219000) as timestamp")
+    checkAnswer(df4, Row(Timestamp.valueOf("1931-01-07 00:30:00.0")))
+
+    val df5 = sql("select TIMESTAMP_MILLISECONDS(-1230219000000) as timestamp")
+    checkAnswer(df5, Row(Timestamp.valueOf("1931-01-07 00:30:00.0")))
+
+    val df6 = sql("select TIMESTAMP_MICROSECONDS(-1230219000000000) as timestamp")
+    checkAnswer(df6, Row(Timestamp.valueOf("1931-01-07 00:30:00.0")))
+
+  }
 }

 case class Foo(bar: Option[String])
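
The expected strings in this test depend on the JVM's wall-clock time zone; Spark's test JVMs conventionally run in America/Los_Angeles, which is why 1230219000 epoch seconds renders as 07:30 rather than 15:30 UTC. A framework-free way to double-check the expected values (the time zone here is an assumption, not something stated in the diff):

import java.time.{Instant, ZoneId}

object ExpectedTimestampCheck {
  def main(args: Array[String]): Unit = {
    // Assumption: the test JVM's time zone is America/Los_Angeles (UTC-8 in January and December).
    val zone = ZoneId.of("America/Los_Angeles")
    println(Instant.ofEpochSecond(1230219000L).atZone(zone).toLocalDateTime)   // 2008-12-25T07:30
    println(Instant.ofEpochSecond(-1230219000L).atZone(zone).toLocalDateTime)  // 1931-01-07T00:30
  }
}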
