
Commit 661e608

Commit message: rebase
Parent: f39256c

4 files changed: +5 -6 lines

sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/SpecializedGetters.java

Lines changed: 0 additions & 1 deletion
@@ -19,7 +19,6 @@
 import org.apache.spark.sql.catalyst.InternalRow;
 import org.apache.spark.sql.types.ArrayData;
-import org.apache.spark.sql.types.DataType;
 import org.apache.spark.sql.types.Decimal;
 import org.apache.spark.unsafe.types.CalendarInterval;
 import org.apache.spark.unsafe.types.UTF8String;

sql/catalyst/src/main/scala/org/apache/spark/sql/types/GenericArrayData.scala

Lines changed: 2 additions & 2 deletions
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.types

 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.unsafe.types.{UTF8String, Interval}
+import org.apache.spark.unsafe.types.{UTF8String, CalendarInterval}

 class GenericArrayData(array: Array[Any]) extends ArrayData {
   private def getAs[T](ordinal: Int) = get(ordinal).asInstanceOf[T]
@@ -49,7 +49,7 @@ class GenericArrayData(array: Array[Any]) extends ArrayData {

   override def getBinary(ordinal: Int): Array[Byte] = getAs(ordinal)

-  override def getInterval(ordinal: Int): Interval = getAs(ordinal)
+  override def getInterval(ordinal: Int): CalendarInterval = getAs(ordinal)

   override def getStruct(ordinal: Int, numFields: Int): InternalRow = getAs(ordinal)
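
This tracks the rename of catalyst's interval class to CalendarInterval in org.apache.spark.unsafe.types. A minimal sketch of the accessor after the change, assuming the two-argument CalendarInterval(months, microseconds) constructor of this era:

  import org.apache.spark.sql.types.GenericArrayData
  import org.apache.spark.unsafe.types.CalendarInterval

  val interval = new CalendarInterval(1, 5000000L)        // 1 month, 5 seconds
  val arr = new GenericArrayData(Array[Any](interval))
  assert(arr.getInterval(0) == interval)                  // typed accessor now returns CalendarInterval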

sql/core/src/main/scala/org/apache/spark/sql/json/JacksonParser.scala

Lines changed: 2 additions & 2 deletions
@@ -173,10 +173,10 @@ private[sql] object JacksonParser {
   private def convertArray(
       factory: JsonFactory,
       parser: JsonParser,
-      schema: DataType): ArrayData = {
+      elementType: DataType): ArrayData = {
     val values = scala.collection.mutable.ArrayBuffer.empty[Any]
     while (nextUntil(parser, JsonToken.END_ARRAY)) {
-      values += convertField(factory, parser, schema)
+      values += convertField(factory, parser, elementType)
     }

     new GenericArrayData(values.toArray)
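
The rename makes the contract explicit: convertArray receives the array's element type, not the schema of the array value itself, and converts each parsed element with it. A hedged sketch of the call site in convertField (the pattern-match shape is assumed, not quoted from this commit):

  // inside convertField's (token, dataType) match:
  case (JsonToken.START_ARRAY, ArrayType(elementType, _)) =>
    convertArray(factory, parser, elementType)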

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -217,7 +217,7 @@ class HiveInspectorSuite extends SparkFunSuite with HiveInspectors {
   test("wrap / unwrap Array Type") {
     val dt = ArrayType(dataTypes(0))

-    val d = row(0) :: row(0) :: Nil
+    val d = new GenericArrayData(Array(row(0), row(0)))
     checkValue(d, unwrap(wrap(d, toInspector(dt), dt), toInspector(dt)))
     checkValue(null, unwrap(wrap(null, toInspector(dt), dt), toInspector(dt)))
     checkValue(d,
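
Since catalyst now represents array values as ArrayData rather than Scala collections, the test builds its expected value with GenericArrayData instead of a List. A minimal sketch of what the wrapped value exposes, assuming ArrayData's numElements() and untyped get(ordinal) accessors:

  val d = new GenericArrayData(Array(row(0), row(0)))
  assert(d.numElements() == 2)        // two elements, read back by ordinal
  assert(d.get(0) == row(0))          // generic accessor behind getAs in the diff above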
