Commit 0d69604

FLUME-1729. Better Flume-Spark integration.
Use readFully instead of read in EventTransformer.
1 parent 3c23c18 commit 0d69604

1 file changed: +3 −3 lines

external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala

Lines changed: 3 additions & 3 deletions
@@ -33,20 +33,20 @@ object EventTransformer extends Logging {
     Array[Byte]) = {
     val bodyLength = in.readInt()
     val bodyBuff = new Array[Byte](bodyLength)
-    in.read(bodyBuff)
+    in.readFully(bodyBuff)
 
     val numHeaders = in.readInt()
     val headers = new java.util.HashMap[CharSequence, CharSequence]
 
     for (i <- 0 until numHeaders) {
       val keyLength = in.readInt()
       val keyBuff = new Array[Byte](keyLength)
-      in.read(keyBuff)
+      in.readFully(keyBuff)
       val key: String = Utils.deserialize(keyBuff)
 
       val valLength = in.readInt()
       val valBuff = new Array[Byte](valLength)
-      in.read(valBuff)
+      in.readFully(valBuff)
       val value: String = Utils.deserialize(valBuff)
 
       headers.put(key, value)

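The motivation for the change: InputStream.read(byte[]) only guarantees that at least one byte is read before returning, so on a socket or chunked stream it can legally fill only part of the buffer, whereas DataInput.readFully(byte[]) keeps reading until the whole array is filled or throws EOFException. A minimal standalone sketch of the difference (the stream and buffer names below are illustrative only, not part of the commit):

import java.io.{ByteArrayInputStream, DataInputStream}

object ReadVsReadFully {
  def main(args: Array[String]): Unit = {
    val payload = Array.tabulate[Byte](8)(_.toByte)

    // read() may return before the buffer is full; over a network stream
    // this would leave the tail of bodyBuff/keyBuff/valBuff unfilled.
    val in1 = new DataInputStream(new ByteArrayInputStream(payload))
    val buf1 = new Array[Byte](8)
    val n = in1.read(buf1)
    println(s"read returned $n byte(s); the rest of buf1 may be unfilled")

    // readFully() loops internally until every byte of the array is read,
    // or throws EOFException if the stream ends first.
    val in2 = new DataInputStream(new ByteArrayInputStream(payload))
    val buf2 = new Array[Byte](8)
    in2.readFully(buf2)
    println(s"readFully guaranteed all ${buf2.length} bytes are present")
  }
}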
0 commit comments
