1 file changed (+18 −6)

sql/core/src/main/scala/org/apache/spark/sql/parquet

@@ -144,19 +144,31 @@ case class ParquetTableScan(
         new Iterator[Row] {
           def hasNext = iter.hasNext
           def next() = {
-            val row = iter.next()._2.asInstanceOf[SpecificMutableRow]
+            iter.next()._2 match {
+              case row: SpecificMutableRow => {
+                // val row = iter.next()._2.asInstanceOf[SpecificMutableRow]

-            // Parquet will leave partitioning columns empty, so we fill them in here.
+                // Parquet will leave partitioning columns empty, so we fill them in here.
             var i = 0
             while (i < requestedPartitionOrdinals.size) {
-              row(requestedPartitionOrdinals(i)._2) =
-                partitionRowValues(requestedPartitionOrdinals(i)._1)
-              i += 1
+                row(requestedPartitionOrdinals(i)._2) =
+                  partitionRowValues(requestedPartitionOrdinals(i)._1)
+                i += 1
             }
             row
+              }
+              case row: Row => {
+                val rVals = row.to[Array]
+                var i = 0
+                while (i < requestedPartitionOrdinals.size) {
+                  rVals.update(requestedPartitionOrdinals(i)._2, partitionRowValues(requestedPartitionOrdinals(i)._1))
+                  i += 1
+                }
+                Row.fromSeq(rVals)
+              }
             }
           }
-        }
+        }}
       } else {
         baseRDD.map(_._2)
       }
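For context, here is a minimal, self-contained sketch of the pattern this diff applies: the Parquet record reader leaves partitioning columns empty, so the scan patches the partition values in afterwards, updating mutable rows in place and rebuilding any other row from a copied, patched array. The Row, MutableRow, ImmutableRow, and fillPartitionValues names below are simplified stand-ins for illustration, not Spark's classes, and requestedPartitionOrdinals is assumed to pair (index into the partition values, target ordinal in the row), mirroring the ._1/._2 accesses above.

object PartitionFillSketch {

  // Simplified stand-ins for the row types in the diff; not Spark's classes.
  sealed trait Row { def values: Seq[Any] }
  final case class ImmutableRow(values: Seq[Any]) extends Row
  final class MutableRow(slots: Array[Any]) extends Row {
    def update(ordinal: Int, value: Any): Unit = slots(ordinal) = value
    def values: Seq[Any] = slots.toSeq
  }

  // Fill the requested partition columns into a row read from Parquet.
  // requestedPartitionOrdinals(i)._1 indexes into partitionRowValues,
  // requestedPartitionOrdinals(i)._2 is the ordinal to fill in the row.
  def fillPartitionValues(
      row: Row,
      requestedPartitionOrdinals: Seq[(Int, Int)],
      partitionRowValues: Seq[Any]): Row = row match {
    case mutable: MutableRow =>
      // Mutable rows are patched in place, as in the SpecificMutableRow branch.
      var i = 0
      while (i < requestedPartitionOrdinals.size) {
        mutable.update(requestedPartitionOrdinals(i)._2, partitionRowValues(requestedPartitionOrdinals(i)._1))
        i += 1
      }
      mutable
    case other =>
      // Other rows are copied to an array, patched, and rebuilt, as in the generic Row branch.
      val vals = other.values.toArray
      var i = 0
      while (i < requestedPartitionOrdinals.size) {
        vals.update(requestedPartitionOrdinals(i)._2, partitionRowValues(requestedPartitionOrdinals(i)._1))
        i += 1 // the loop must advance i, otherwise it never terminates
      }
      ImmutableRow(vals.toSeq)
  }

  def main(args: Array[String]): Unit = {
    // A scanned row with an empty slot at ordinal 2, filled from partition value index 0.
    val scanned: Row = ImmutableRow(Seq("a", 1, null))
    val filled = fillPartitionValues(scanned, Seq((0, 2)), Seq("2014-11-01"))
    println(filled.values) // List(a, 1, 2014-11-01)
  }
}

Keeping the in-place branch presumably matters for the scan's hot path, since the fallback allocates a fresh array and row object per record.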