1 file changed: +8 −7 lines

sql/core/src/main/scala/org/apache/spark/sql/json

@@ -420,14 +420,14 @@ private[sql] object JsonRDD extends Logging {
     case NullType => null
     case ArrayType(elementType, _) => {
       val arrayLength = value.asInstanceOf[Seq[Any]].length
-      val arraySlot = if (slot == null) {
-        (new Array[Any](arrayLength)).toSeq
-      } else {
+      val arraySlot = if (slot != null && slot.asInstanceOf[Seq[Any]].size == arrayLength) {
         slot.asInstanceOf[Seq[Any]]
+      } else {
+        (new Array[Any](arrayLength)).toSeq
       }
       value.asInstanceOf[Seq[Any]].zip(arraySlot).map {
         case (v, s) => enforceCorrectType(v, elementType, s)
-      }
+      }.toList
     }
     case struct: StructType =>
       asRow(value.asInstanceOf[Map[String, Any]], struct, slot.asInstanceOf[GenericMutableRow])
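
For readers skimming this hunk: the change tightens the reuse condition for the `ArrayType` slot from a plain null check to a null-and-size check, so a previously allocated buffer is reused only when its size matches the incoming sequence. Below is a minimal standalone sketch of that reuse-or-allocate pattern; it is illustrative only, not the Spark code, and the `chooseArraySlot` helper name is hypothetical.

```scala
object SlotReuseSketch {
  // Hypothetical helper mirroring the arraySlot logic in the hunk above:
  // reuse the previously allocated slot only when its size matches the new value.
  def chooseArraySlot(value: Seq[Any], slot: Any): Seq[Any] =
    if (slot != null && slot.asInstanceOf[Seq[Any]].size == value.length) {
      slot.asInstanceOf[Seq[Any]]            // sizes line up: reuse the old buffer
    } else {
      (new Array[Any](value.length)).toSeq   // no slot or size changed: allocate
    }

  def main(args: Array[String]): Unit = {
    val previous: Seq[Any] = Seq(null, null, null)
    println(chooseArraySlot(Seq(1, 2, 3), previous) eq previous)  // true: reused
    println(chooseArraySlot(Seq(1, 2), previous) eq previous)     // false: reallocated
  }
}
```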
@@ -441,10 +441,11 @@ private[sql] object JsonRDD extends Logging {
       json: Map[String,Any],
       schema: StructType,
       mutable: GenericMutableRow = null): Row = {
-    val row = if (mutable == null) {
-      new GenericMutableRow(schema.fields.length)
-    } else {
+
+    val row = if (mutable != null && mutable.length == schema.fields.length) {
       mutable
+    } else {
+      new GenericMutableRow(schema.fields.length)
     }

     for (i <- 0 until schema.fields.length) {
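
The second hunk applies the same guard to the row buffer in `asRow`: the caller-supplied `GenericMutableRow` is reused only when its length equals the number of schema fields, otherwise a fresh row is allocated. A minimal sketch of that pattern follows, with a plain `Array[Any]` standing in for `GenericMutableRow` and a hypothetical `chooseRowBuffer` helper; it assumes nothing about Spark internals.

```scala
object RowBufferReuseSketch {
  // Hypothetical helper; Array[Any] stands in for Spark's GenericMutableRow.
  // Reuse the caller-supplied buffer only when its length matches the schema width.
  def chooseRowBuffer(mutable: Array[Any], fieldCount: Int): Array[Any] =
    if (mutable != null && mutable.length == fieldCount) mutable
    else new Array[Any](fieldCount)

  def main(args: Array[String]): Unit = {
    val buf = new Array[Any](3)
    println(chooseRowBuffer(buf, 3) eq buf)    // true: width unchanged, buffer reused
    println(chooseRowBuffer(buf, 4) eq buf)    // false: width changed, fresh buffer
    println(chooseRowBuffer(null, 2).length)   // 2: no buffer supplied, allocate one
  }
}
```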