Skip to content
This repository was archived by the owner on May 9, 2024. It is now read-only.

Commit a5a4820

Browse files
committed
Use explicit types for all numbers in ChainedBuffer
1 parent b7e0213 commit a5a4820

File tree

1 file changed

+11
-11
lines changed

1 file changed

+11
-11
lines changed

core/src/main/scala/org/apache/spark/util/collection/ChainedBuffer.scala

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -46,11 +46,11 @@ private[spark] class ChainedBuffer(chunkSize: Int) {
4646
throw new IndexOutOfBoundsException(
4747
s"Read of $len bytes at position $pos would go past size ${_size} of buffer")
4848
}
49-
var chunkIndex = (pos >> chunkSizeLog2).toInt
50-
var posInChunk = (pos - (chunkIndex << chunkSizeLog2)).toInt
51-
var written = 0
49+
var chunkIndex: Int = (pos >> chunkSizeLog2).toInt
50+
var posInChunk: Int = (pos - (chunkIndex << chunkSizeLog2)).toInt
51+
var written: Int = 0
5252
while (written < len) {
53-
val toRead = math.min(len - written, chunkSize - posInChunk)
53+
val toRead: Int = math.min(len - written, chunkSize - posInChunk)
5454
os.write(chunks(chunkIndex), posInChunk, toRead)
5555
written += toRead
5656
chunkIndex += 1
@@ -71,11 +71,11 @@ private[spark] class ChainedBuffer(chunkSize: Int) {
7171
throw new IndexOutOfBoundsException(
7272
s"Read of $len bytes at position $pos would go past size of buffer")
7373
}
74-
var chunkIndex = (pos >> chunkSizeLog2).toInt
75-
var posInChunk = (pos - (chunkIndex << chunkSizeLog2)).toInt
76-
var written = 0
74+
var chunkIndex: Int = (pos >> chunkSizeLog2).toInt
75+
var posInChunk: Int = (pos - (chunkIndex << chunkSizeLog2)).toInt
76+
var written: Int = 0
7777
while (written < len) {
78-
val toRead = math.min(len - written, chunkSize - posInChunk)
78+
val toRead: Int = math.min(len - written, chunkSize - posInChunk)
7979
System.arraycopy(chunks(chunkIndex), posInChunk, bytes, offs + written, toRead)
8080
written += toRead
8181
chunkIndex += 1
@@ -97,7 +97,7 @@ private[spark] class ChainedBuffer(chunkSize: Int) {
9797
s"Write at position $pos starts after end of buffer ${_size}")
9898
}
9999
// Grow if needed
100-
val endChunkIndex = (pos + len - 1) >> chunkSizeLog2
100+
val endChunkIndex: Int = ((pos + len - 1) >> chunkSizeLog2).toInt
101101
while (endChunkIndex >= chunks.length) {
102102
chunks += new Array[Byte](chunkSize)
103103
}
@@ -106,7 +106,7 @@ private[spark] class ChainedBuffer(chunkSize: Int) {
106106
var posInChunk = (pos - (chunkIndex << chunkSizeLog2)).toInt
107107
var written = 0
108108
while (written < len) {
109-
val toWrite = math.min(len - written, chunkSize - posInChunk)
109+
val toWrite: Int = math.min(len - written, chunkSize - posInChunk)
110110
System.arraycopy(bytes, offs + written, chunks(chunkIndex), posInChunk, toWrite)
111111
written += toWrite
112112
chunkIndex += 1
@@ -131,7 +131,7 @@ private[spark] class ChainedBuffer(chunkSize: Int) {
131131
* Output stream that writes to a ChainedBuffer.
132132
*/
133133
private[spark] class ChainedBufferOutputStream(chainedBuffer: ChainedBuffer) extends OutputStream {
134-
private var pos = 0
134+
private var pos: Long = 0
135135

136136
override def write(b: Int): Unit = {
137137
throw new UnsupportedOperationException()

0 commit comments

Comments (0)