
Commit 49d8a0d

Fix bug introduced when reading over record boundaries
1 parent: 6006856 · commit: 49d8a0d

File tree

1 file changed: +2, -1 lines


core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala

Lines changed: 2 additions & 1 deletion
@@ -178,7 +178,8 @@ private[spark] class OrderedInputStream(metaBuffer: IntBuffer, kvBuffer: Chained
     if (metaBufferPos >= metaBuffer.position) {
       return -1
     }
-    val bytesRemainingInRecord = metaBuffer.get(metaBufferPos + KEY_VAL_LEN)
+    val bytesRemainingInRecord = (metaBuffer.get(metaBufferPos + KEY_VAL_LEN) -
+      (kvBufferPos - getKeyStartPos(metaBuffer, metaBufferPos))).toInt
     val toRead = math.min(bytesRemainingInRecord, len)
     kvBuffer.read(kvBufferPos, bytes, offs, toRead)
     if (toRead == bytesRemainingInRecord) {
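
To illustrate the fix, here is a minimal standalone Scala sketch. It is not the Spark class itself: the object name, the record length, and the position values are made-up stand-ins for the quantities the diff reads from metaBuffer, getKeyStartPos, and kvBufferPos. It shows why subtracting the bytes already consumed within the record keeps a chunked read from running into the next record.

object RecordBoundarySketch {
  def main(args: Array[String]): Unit = {
    val recordLen = 64                   // stand-in for metaBuffer.get(metaBufferPos + KEY_VAL_LEN)
    val keyStartPos = 1024L              // stand-in for getKeyStartPos(metaBuffer, metaBufferPos)
    val kvBufferPos = keyStartPos + 40L  // 40 bytes of this record already consumed
    val len = 32                         // caller requested up to 32 bytes

    // Before the fix: the whole record length was treated as still unread.
    val buggyRemaining = recordLen
    // After the fix: subtract the bytes already consumed within the record.
    val fixedRemaining = (recordLen - (kvBufferPos - keyStartPos)).toInt

    println(s"buggy toRead = ${math.min(buggyRemaining, len)}")  // 32: would read past the record end
    println(s"fixed toRead = ${math.min(fixedRemaining, len)}")  // 24: stops at the record boundary
  }
}

With the old value, once kvBufferPos had advanced partway into a record, math.min(bytesRemainingInRecord, len) could still hand the caller bytes belonging to the following record.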
