Skip to content

Commit a3d8056

Browse files
mundaym authored and dongjoon-hyun committed
[SPARK-32458][SQL][TESTS] Fix incorrectly sized row value reads
### What changes were proposed in this pull request?

Updates to tests to use correctly sized `getInt` or `getLong` calls.

### Why are the changes needed?

The reads were incorrectly sized (i.e. `putLong` paired with `getInt` and `putInt` paired with `getLong`). This causes test failures on big-endian systems.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Tests were run on a big-endian system (s390x). This change is unlikely to have any practical effect on little-endian systems.

Closes #29258 from mundaym/fix-row.

Authored-by: Michael Munday <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent 44c868b commit a3d8056

File tree

2 files changed

+5
-5
lines changed

2 files changed

+5
-5
lines changed

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -336,7 +336,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest {
336336
val encoder = RowEncoder(schema).resolveAndBind()
337337
val localDate = java.time.LocalDate.parse("2019-02-27")
338338
val row = toRow(encoder, Row(localDate))
339-
assert(row.getLong(0) === DateTimeUtils.localDateToDays(localDate))
339+
assert(row.getInt(0) === DateTimeUtils.localDateToDays(localDate))
340340
val readback = fromRow(encoder, row)
341341
assert(readback.get(0).equals(localDate))
342342
}

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/UnsafeMapSuite.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -48,17 +48,17 @@ class UnsafeMapSuite extends SparkFunSuite {
4848
val ser = new JavaSerializer(new SparkConf).newInstance()
4949
val mapDataSer = ser.deserialize[UnsafeMapData](ser.serialize(unsafeMapData))
5050
assert(mapDataSer.numElements() == 1)
51-
assert(mapDataSer.keyArray().getInt(0) == 19285)
52-
assert(mapDataSer.valueArray().getInt(0) == 19286)
51+
assert(mapDataSer.keyArray().getLong(0) == 19285)
52+
assert(mapDataSer.valueArray().getLong(0) == 19286)
5353
assert(mapDataSer.getBaseObject.asInstanceOf[Array[Byte]].length == 1024)
5454
}
5555

5656
test("unsafe Kryo serialization") {
5757
val ser = new KryoSerializer(new SparkConf).newInstance()
5858
val mapDataSer = ser.deserialize[UnsafeMapData](ser.serialize(unsafeMapData))
5959
assert(mapDataSer.numElements() == 1)
60-
assert(mapDataSer.keyArray().getInt(0) == 19285)
61-
assert(mapDataSer.valueArray().getInt(0) == 19286)
60+
assert(mapDataSer.keyArray().getLong(0) == 19285)
61+
assert(mapDataSer.valueArray().getLong(0) == 19286)
6262
assert(mapDataSer.getBaseObject.asInstanceOf[Array[Byte]].length == 1024)
6363
}
6464
}

0 commit comments

Comments (0)