File tree (Expand file tree / Collapse file tree) — 1 file changed: +8 −7 lines changed
sql/core/src/main/scala/org/apache/spark/sql/json — 1 file changed: +8 −7 lines changed. Original file line number | Diff line number | Diff line change @@ -420,14 +420,14 @@ private[sql] object JsonRDD extends Logging {
420420 case NullType => null
421421 case ArrayType (elementType, _) => {
422422 val arrayLength = value.asInstanceOf [Seq [Any ]].length
423- val arraySlot = if (slot == null ) {
424- (new Array [Any ](arrayLength)).toSeq
425- } else {
423+ val arraySlot = if (slot != null && slot.asInstanceOf [Seq [Any ]].size == arrayLength) {
426424 slot.asInstanceOf [Seq [Any ]]
425+ } else {
426+ (new Array [Any ](arrayLength)).toSeq
427427 }
428428 value.asInstanceOf [Seq [Any ]].zip(arraySlot).map {
429429 case (v, s) => enforceCorrectType(v, elementType,s)
430- }
430+ }.toList
431431 }
432432 case struct : StructType =>
433433 asRow(value.asInstanceOf [Map [String , Any ]], struct, slot.asInstanceOf [GenericMutableRow ])
@@ -441,10 +441,11 @@ private[sql] object JsonRDD extends Logging {
441441 json : Map [String ,Any ],
442442 schema : StructType ,
443443 mutable : GenericMutableRow = null ): Row = {
444- val row = if (mutable == null ) {
445- new GenericMutableRow (schema.fields.length)
446- } else {
444+
445+ val row = if (mutable != null && mutable.length == schema.fields.length) {
447446 mutable
447+ } else {
448+ new GenericMutableRow (schema.fields.length)
448449 }
449450
450451 for (i <- 0 until schema.fields.length) {
You can’t perform that action at this time.
0 commit comments