Commit 5340dfa

[SPARK-9430][SQL] Rename IntervalType to CalendarIntervalType.
We want to introduce a new IntervalType in 1.6 that is based only on the number of microseconds, so intervals can be compared. Renaming the existing IntervalType to CalendarIntervalType so we can do that in the future.

Author: Reynold Xin <[email protected]>

Closes #7745 from rxin/calendarintervaltype and squashes the following commits:

99f64e8 [Reynold Xin] One more line ...
13466c8 [Reynold Xin] Fixed tests.
e20f24e [Reynold Xin] [SPARK-9430][SQL] Rename IntervalType to CalendarIntervalType.
1 parent 819be46

23 files changed: +286 -252 lines
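
Background on the rename: a calendar interval carries two independent components, months and microseconds, and because a month has no fixed length in microseconds, two such values admit no total order. A minimal Scala sketch (values hypothetical; the constructor and the MICROS_PER_DAY constant both appear in the diffs below):

    import org.apache.spark.unsafe.types.CalendarInterval

    // "1 month" vs. "30 days": neither is unambiguously larger, since a month
    // spans 28 to 31 days, so calendar intervals cannot be sorted or compared.
    val oneMonth   = new CalendarInterval(1, 0L)
    val thirtyDays = new CalendarInterval(0, 30 * CalendarInterval.MICROS_PER_DAY)

A future, purely microsecond-based IntervalType would not have this ambiguity, which is what the commit message gestures at.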

sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/SpecializedGetters.java

Lines changed: 2 additions & 2 deletions
@@ -19,7 +19,7 @@
 
 import org.apache.spark.sql.catalyst.InternalRow;
 import org.apache.spark.sql.types.Decimal;
-import org.apache.spark.unsafe.types.Interval;
+import org.apache.spark.unsafe.types.CalendarInterval;
 import org.apache.spark.unsafe.types.UTF8String;
 
 public interface SpecializedGetters {

@@ -46,7 +46,7 @@ public interface SpecializedGetters {
 
   byte[] getBinary(int ordinal);
 
-  Interval getInterval(int ordinal);
+  CalendarInterval getInterval(int ordinal);
 
   InternalRow getStruct(int ordinal, int numFields);

sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java

Lines changed: 5 additions & 5 deletions
@@ -29,7 +29,7 @@
 import org.apache.spark.unsafe.array.ByteArrayMethods;
 import org.apache.spark.unsafe.bitset.BitSetMethods;
 import org.apache.spark.unsafe.hash.Murmur3_x86_32;
-import org.apache.spark.unsafe.types.Interval;
+import org.apache.spark.unsafe.types.CalendarInterval;
 import org.apache.spark.unsafe.types.UTF8String;
 
 import static org.apache.spark.sql.types.DataTypes.*;

@@ -92,7 +92,7 @@ public static int calculateBitSetWidthInBytes(int numFields) {
     Arrays.asList(new DataType[]{
       StringType,
       BinaryType,
-      IntervalType
+      CalendarIntervalType
     }));
     _readableFieldTypes.addAll(settableFieldTypes);
     readableFieldTypes = Collections.unmodifiableSet(_readableFieldTypes);

@@ -265,7 +265,7 @@ public Object get(int ordinal, DataType dataType) {
       return getBinary(ordinal);
     } else if (dataType instanceof StringType) {
       return getUTF8String(ordinal);
-    } else if (dataType instanceof IntervalType) {
+    } else if (dataType instanceof CalendarIntervalType) {
       return getInterval(ordinal);
     } else if (dataType instanceof StructType) {
       return getStruct(ordinal, ((StructType) dataType).size());

@@ -350,7 +350,7 @@ public byte[] getBinary(int ordinal) {
   }
 
   @Override
-  public Interval getInterval(int ordinal) {
+  public CalendarInterval getInterval(int ordinal) {
     if (isNullAt(ordinal)) {
       return null;
     } else {

@@ -359,7 +359,7 @@ public Interval getInterval(int ordinal) {
       final int months = (int) PlatformDependent.UNSAFE.getLong(baseObject, baseOffset + offset);
       final long microseconds =
         PlatformDependent.UNSAFE.getLong(baseObject, baseOffset + offset + 8);
-      return new Interval(months, microseconds);
+      return new CalendarInterval(months, microseconds);
     }
   }
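
The getInterval hunk also documents the on-row encoding: the variable-length region holds two 8-byte words, months first (stored widened to a long, narrowed back to an int on read) and microseconds at offset + 8. A standalone sketch of that layout using a plain ByteBuffer instead of Spark's PlatformDependent.UNSAFE (illustrative only, not the project's code):

    import java.nio.{ByteBuffer, ByteOrder}

    // Two 8-byte slots, mirroring the offsets read in getInterval above.
    val region = ByteBuffer.allocate(16).order(ByteOrder.nativeOrder())
    region.putLong(0, 3L)                  // months, stored as a full word
    region.putLong(8, 123456789L)          // microseconds, at offset + 8
    val months = region.getLong(0).toInt   // narrowed back to int on read
    val micros = region.getLong(8)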

sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java

Lines changed: 2 additions & 2 deletions
@@ -21,7 +21,7 @@
 import org.apache.spark.unsafe.PlatformDependent;
 import org.apache.spark.unsafe.array.ByteArrayMethods;
 import org.apache.spark.unsafe.types.ByteArray;
-import org.apache.spark.unsafe.types.Interval;
+import org.apache.spark.unsafe.types.CalendarInterval;
 import org.apache.spark.unsafe.types.UTF8String;
 
 /**

@@ -131,7 +131,7 @@ public static int write(UnsafeRow target, int ordinal, int cursor, InternalRow i
   /** Writer for interval type. */
   public static class IntervalWriter {
 
-    public static int write(UnsafeRow target, int ordinal, int cursor, Interval input) {
+    public static int write(UnsafeRow target, int ordinal, int cursor, CalendarInterval input) {
       final long offset = target.getBaseOffset() + cursor;
 
       // Write the months and microseconds fields of Interval to the variable length portion.
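
IntervalWriter is the mirror image of that read path: it writes both fields at the cursor, so an interval always consumes a fixed 16 bytes of the variable-length portion. A hypothetical stand-in using the same ByteBuffer model as above:

    import java.nio.ByteBuffer

    // Hypothetical analogue of IntervalWriter.write: append months and
    // microseconds at the cursor and report the 16 bytes consumed.
    def writeInterval(row: ByteBuffer, cursor: Int, months: Int, micros: Long): Int = {
      row.putLong(cursor, months.toLong)
      row.putLong(cursor + 8, micros)
      16
    }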

sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java

Lines changed: 2 additions & 2 deletions
@@ -50,9 +50,9 @@ public class DataTypes {
   public static final DataType TimestampType = TimestampType$.MODULE$;
 
   /**
-   * Gets the IntervalType object.
+   * Gets the CalendarIntervalType object.
    */
-  public static final DataType IntervalType = IntervalType$.MODULE$;
+  public static final DataType CalendarIntervalType = CalendarIntervalType$.MODULE$;
 
   /**
    * Gets the DoubleType object.
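
For schema authors the rename is mechanical: the Java-side constant moves from DataTypes.IntervalType to DataTypes.CalendarIntervalType, and the Scala side gains the CalendarIntervalType object. A sketch with a hypothetical field name:

    import org.apache.spark.sql.types._

    // "duration" is an illustrative field name, not something from the commit.
    val schema = StructType(Seq(
      StructField("id", LongType, nullable = false),
      StructField("duration", CalendarIntervalType)  // formerly IntervalType
    ))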

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/InternalRow.scala

Lines changed: 3 additions & 2 deletions
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.types._
-import org.apache.spark.unsafe.types.{Interval, UTF8String}
+import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
 
 /**
  * An abstract class for row used internal in Spark SQL, which only contain the columns as

@@ -61,7 +61,8 @@ abstract class InternalRow extends Serializable with SpecializedGetters {
   override def getDecimal(ordinal: Int): Decimal =
     getAs[Decimal](ordinal, DecimalType.SYSTEM_DEFAULT)
 
-  override def getInterval(ordinal: Int): Interval = getAs[Interval](ordinal, IntervalType)
+  override def getInterval(ordinal: Int): CalendarInterval =
+    getAs[CalendarInterval](ordinal, CalendarIntervalType)
 
   // This is only use for test and will throw a null pointer exception if the position is null.
   def getString(ordinal: Int): String = getUTF8String(ordinal).toString
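
Call sites are unaffected apart from the static return type, since the override still funnels through the generic getAs. A hypothetical consumer, summing an interval column with the add method seen elsewhere in this diff:

    import org.apache.spark.sql.catalyst.InternalRow
    import org.apache.spark.unsafe.types.CalendarInterval

    // Hypothetical helper: fold an interval column across rows, skipping nulls.
    def sumIntervals(rows: Iterator[InternalRow], ordinal: Int): CalendarInterval =
      rows.foldLeft(new CalendarInterval(0, 0L)) { (acc, row) =>
        if (row.isNullAt(ordinal)) acc else acc.add(row.getInterval(ordinal))
      }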

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala

Lines changed: 8 additions & 8 deletions
@@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.types._
-import org.apache.spark.unsafe.types.Interval
+import org.apache.spark.unsafe.types.CalendarInterval
 
 /**
  * A very simple SQL parser. Based loosely on:

@@ -365,32 +365,32 @@ class SqlParser extends AbstractSparkSQLParser with DataTypeParser {
 
   protected lazy val millisecond: Parser[Long] =
     integral <~ intervalUnit("millisecond") ^^ {
-      case num => num.toLong * Interval.MICROS_PER_MILLI
+      case num => num.toLong * CalendarInterval.MICROS_PER_MILLI
     }
 
   protected lazy val second: Parser[Long] =
     integral <~ intervalUnit("second") ^^ {
-      case num => num.toLong * Interval.MICROS_PER_SECOND
+      case num => num.toLong * CalendarInterval.MICROS_PER_SECOND
     }
 
   protected lazy val minute: Parser[Long] =
     integral <~ intervalUnit("minute") ^^ {
-      case num => num.toLong * Interval.MICROS_PER_MINUTE
+      case num => num.toLong * CalendarInterval.MICROS_PER_MINUTE
     }
 
   protected lazy val hour: Parser[Long] =
     integral <~ intervalUnit("hour") ^^ {
-      case num => num.toLong * Interval.MICROS_PER_HOUR
+      case num => num.toLong * CalendarInterval.MICROS_PER_HOUR
     }
 
   protected lazy val day: Parser[Long] =
     integral <~ intervalUnit("day") ^^ {
-      case num => num.toLong * Interval.MICROS_PER_DAY
+      case num => num.toLong * CalendarInterval.MICROS_PER_DAY
     }
 
   protected lazy val week: Parser[Long] =
     integral <~ intervalUnit("week") ^^ {
-      case num => num.toLong * Interval.MICROS_PER_WEEK
+      case num => num.toLong * CalendarInterval.MICROS_PER_WEEK
     }
 
   protected lazy val intervalLiteral: Parser[Literal] =

@@ -406,7 +406,7 @@ class SqlParser extends AbstractSparkSQLParser with DataTypeParser {
       val months = Seq(year, month).map(_.getOrElse(0)).sum
       val microseconds = Seq(week, day, hour, minute, second, millisecond, microsecond)
         .map(_.getOrElse(0L)).sum
-      Literal.create(new Interval(months, microseconds), IntervalType)
+      Literal.create(new CalendarInterval(months, microseconds), CalendarIntervalType)
     }
 
   private def toNarrowestIntegerType(value: String): Any = {
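
As the hunks show, the parser folds every sub-month unit into a single microsecond total, while year and month feed the separate months field, since neither side converts cleanly into the other. A quick sketch of the arithmetic for a hypothetical literal such as INTERVAL 1 week 2 days 3 hours:

    import org.apache.spark.unsafe.types.CalendarInterval
    import org.apache.spark.unsafe.types.CalendarInterval._

    // Hypothetical literal: INTERVAL 1 week 2 days 3 hours
    val micros = 1 * MICROS_PER_WEEK + 2 * MICROS_PER_DAY + 3 * MICROS_PER_HOUR
    val literalValue = new CalendarInterval(0, micros)  // months = 0: no year/month parts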

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala

Lines changed: 1 addition & 1 deletion
@@ -48,7 +48,7 @@ case class BoundReference(ordinal: Int, dataType: DataType, nullable: Boolean)
       case DoubleType => input.getDouble(ordinal)
       case StringType => input.getUTF8String(ordinal)
       case BinaryType => input.getBinary(ordinal)
-      case IntervalType => input.getInterval(ordinal)
+      case CalendarIntervalType => input.getInterval(ordinal)
       case t: StructType => input.getStruct(ordinal, t.size)
       case _ => input.get(ordinal, dataType)
     }
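
BoundReference thus keeps a dedicated typed path for interval columns ahead of the generic fallback. A hedged sketch of what that means for evaluation (constructor arguments per the case-class signature in the hunk header):

    import org.apache.spark.sql.catalyst.expressions.BoundReference
    import org.apache.spark.sql.types.CalendarIntervalType

    // Hypothetical: column 2 of the input rows holds a calendar interval.
    val ref = BoundReference(2, CalendarIntervalType, nullable = true)
    // ref.eval(row) now routes through input.getInterval(2) instead of the
    // untyped input.get(2, dataType) fallback.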

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala

Lines changed: 6 additions & 6 deletions
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
 import org.apache.spark.sql.types._
-import org.apache.spark.unsafe.types.{Interval, UTF8String}
+import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
 
 import scala.collection.mutable
 

@@ -55,7 +55,7 @@ object Cast {
 
     case (_, DateType) => true
 
-    case (StringType, IntervalType) => true
+    case (StringType, CalendarIntervalType) => true
 
     case (StringType, _: NumericType) => true
     case (BooleanType, _: NumericType) => true

@@ -225,7 +225,7 @@ case class Cast(child: Expression, dataType: DataType)
   // IntervalConverter
   private[this] def castToInterval(from: DataType): Any => Any = from match {
     case StringType =>
-      buildCast[UTF8String](_, s => Interval.fromString(s.toString))
+      buildCast[UTF8String](_, s => CalendarInterval.fromString(s.toString))
     case _ => _ => null
   }
 

@@ -398,7 +398,7 @@ case class Cast(child: Expression, dataType: DataType)
     case DateType => castToDate(from)
     case decimal: DecimalType => castToDecimal(from, decimal)
     case TimestampType => castToTimestamp(from)
-    case IntervalType => castToInterval(from)
+    case CalendarIntervalType => castToInterval(from)
     case BooleanType => castToBoolean(from)
     case ByteType => castToByte(from)
     case ShortType => castToShort(from)

@@ -438,7 +438,7 @@ case class Cast(child: Expression, dataType: DataType)
     case DateType => castToDateCode(from, ctx)
     case decimal: DecimalType => castToDecimalCode(from, decimal)
     case TimestampType => castToTimestampCode(from, ctx)
-    case IntervalType => castToIntervalCode(from)
+    case CalendarIntervalType => castToIntervalCode(from)
     case BooleanType => castToBooleanCode(from)
     case ByteType => castToByteCode(from)
     case ShortType => castToShortCode(from)

@@ -630,7 +630,7 @@ case class Cast(child: Expression, dataType: DataType)
   private[this] def castToIntervalCode(from: DataType): CastFunction = from match {
     case StringType =>
       (c, evPrim, evNull) =>
-        s"$evPrim = Interval.fromString($c.toString());"
+        s"$evPrim = CalendarInterval.fromString($c.toString());"
   }
 
   private[this] def decimalToTimestampCode(d: String): String =
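
Both the interpreted and the generated cast accept only StringType as a source and delegate to CalendarInterval.fromString; every other source type yields null. A small sketch (the literal syntax follows the parser rules above; null on malformed input is my reading of the cast's convention, not something stated in this diff):

    import org.apache.spark.unsafe.types.CalendarInterval

    val ok  = CalendarInterval.fromString("interval 3 hours 30 minutes")
    val bad = CalendarInterval.fromString("not an interval")  // assumed to be null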

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala

Lines changed: 10 additions & 10 deletions
@@ -21,7 +21,7 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.util.TypeUtils
 import org.apache.spark.sql.types._
-import org.apache.spark.unsafe.types.Interval
+import org.apache.spark.unsafe.types.CalendarInterval
 
 
 case class UnaryMinus(child: Expression) extends UnaryExpression with ExpectsInputTypes {

@@ -37,12 +37,12 @@ case class UnaryMinus(child: Expression) extends UnaryExpression with ExpectsInp
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = dataType match {
     case dt: DecimalType => defineCodeGen(ctx, ev, c => s"$c.unary_$$minus()")
     case dt: NumericType => defineCodeGen(ctx, ev, c => s"(${ctx.javaType(dt)})(-($c))")
-    case dt: IntervalType => defineCodeGen(ctx, ev, c => s"$c.negate()")
+    case dt: CalendarIntervalType => defineCodeGen(ctx, ev, c => s"$c.negate()")
   }
 
   protected override def nullSafeEval(input: Any): Any = {
-    if (dataType.isInstanceOf[IntervalType]) {
-      input.asInstanceOf[Interval].negate()
+    if (dataType.isInstanceOf[CalendarIntervalType]) {
+      input.asInstanceOf[CalendarInterval].negate()
     } else {
       numeric.negate(input)
     }

@@ -121,8 +121,8 @@ case class Add(left: Expression, right: Expression) extends BinaryArithmetic {
   private lazy val numeric = TypeUtils.getNumeric(dataType)
 
   protected override def nullSafeEval(input1: Any, input2: Any): Any = {
-    if (dataType.isInstanceOf[IntervalType]) {
-      input1.asInstanceOf[Interval].add(input2.asInstanceOf[Interval])
+    if (dataType.isInstanceOf[CalendarIntervalType]) {
+      input1.asInstanceOf[CalendarInterval].add(input2.asInstanceOf[CalendarInterval])
     } else {
       numeric.plus(input1, input2)
     }

@@ -134,7 +134,7 @@ case class Add(left: Expression, right: Expression) extends BinaryArithmetic {
     case ByteType | ShortType =>
       defineCodeGen(ctx, ev,
         (eval1, eval2) => s"(${ctx.javaType(dataType)})($eval1 $symbol $eval2)")
-    case IntervalType =>
+    case CalendarIntervalType =>
       defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1.add($eval2)")
     case _ =>
       defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1 $symbol $eval2")

@@ -150,8 +150,8 @@ case class Subtract(left: Expression, right: Expression) extends BinaryArithmeti
   private lazy val numeric = TypeUtils.getNumeric(dataType)
 
   protected override def nullSafeEval(input1: Any, input2: Any): Any = {
-    if (dataType.isInstanceOf[IntervalType]) {
-      input1.asInstanceOf[Interval].subtract(input2.asInstanceOf[Interval])
+    if (dataType.isInstanceOf[CalendarIntervalType]) {
+      input1.asInstanceOf[CalendarInterval].subtract(input2.asInstanceOf[CalendarInterval])
     } else {
       numeric.minus(input1, input2)
     }

@@ -163,7 +163,7 @@ case class Subtract(left: Expression, right: Expression) extends BinaryArithmeti
     case ByteType | ShortType =>
       defineCodeGen(ctx, ev,
         (eval1, eval2) => s"(${ctx.javaType(dataType)})($eval1 $symbol $eval2)")
-    case IntervalType =>
+    case CalendarIntervalType =>
      defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1.subtract($eval2)")
     case _ =>
       defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1 $symbol $eval2")
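
UnaryMinus, Add, and Subtract each special-case the interval type, in both the interpreted path and codegen, by calling negate, add, and subtract on the operands; the arithmetic is component-wise over months and microseconds. A minimal sketch:

    import org.apache.spark.unsafe.types.CalendarInterval
    import org.apache.spark.unsafe.types.CalendarInterval._

    val a = new CalendarInterval(1, 3 * MICROS_PER_HOUR)     // 1 month, 3 hours
    val b = new CalendarInterval(0, 30 * MICROS_PER_MINUTE)  // 30 minutes
    val sum  = a.add(b)       // 1 month, 3.5 hours' worth of microseconds
    val diff = a.subtract(b)  // 1 month, 2.5 hours' worth of microseconds
    val neg  = a.negate()     // -1 month, -3 hours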

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala

Lines changed: 3 additions & 3 deletions
@@ -108,7 +108,7 @@ class CodeGenContext {
     case _ if isPrimitiveType(jt) => s"$row.get${primitiveTypeName(jt)}($ordinal)"
     case StringType => s"$row.getUTF8String($ordinal)"
     case BinaryType => s"$row.getBinary($ordinal)"
-    case IntervalType => s"$row.getInterval($ordinal)"
+    case CalendarIntervalType => s"$row.getInterval($ordinal)"
     case t: StructType => s"$row.getStruct($ordinal, ${t.size})"
     case _ => s"($jt)$row.get($ordinal)"
   }

@@ -150,7 +150,7 @@ class CodeGenContext {
     case dt: DecimalType => "Decimal"
     case BinaryType => "byte[]"
     case StringType => "UTF8String"
-    case IntervalType => "Interval"
+    case CalendarIntervalType => "CalendarInterval"
     case _: StructType => "InternalRow"
     case _: ArrayType => s"scala.collection.Seq"
     case _: MapType => s"scala.collection.Map"

@@ -293,7 +293,7 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
       classOf[UnsafeRow].getName,
       classOf[UTF8String].getName,
       classOf[Decimal].getName,
-      classOf[Interval].getName
+      classOf[CalendarInterval].getName
     ))
     evaluator.setExtendedClass(classOf[GeneratedClass])
     try {
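
These three hunks cover the code generator's full contract for the type: how generated code reads an interval column, the Java type name emitted for it, and the class made importable inside Janino-compiled code. A small mirror of the non-primitive branch of that type-name mapping, restricted to the cases visible in the hunk:

    import org.apache.spark.sql.types._

    // Sketch of the boxed-type names CodeGenContext emits for these types.
    def javaTypeName(dt: DataType): String = dt match {
      case BinaryType           => "byte[]"
      case StringType           => "UTF8String"
      case CalendarIntervalType => "CalendarInterval"
      case _: StructType        => "InternalRow"
      case other                => other.toString  // fallback, illustrative only
    }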
