
Commit 30deb62

streams: use Array for Readable buffer
1 parent 25576b5 commit 30deb62

4 files changed (+117 -126 lines)

4 files changed

+117
-126
lines changed
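The commit swaps the Readable buffer's linked-list BufferList for a plain Array plus a bufferIndex cursor: consuming a chunk advances the index instead of shifting the array (which is O(n) per call), and the array is only compacted once the consumed prefix grows large. A minimal, illustrative sketch of that pattern follows; ChunkQueue is a made-up name and this is not code from the commit.

'use strict';

// Illustrative sketch: a FIFO chunk queue backed by a plain Array plus a read
// cursor, mirroring the buffer/bufferIndex idea used in this commit.
class ChunkQueue {
  constructor() {
    this.buffer = [];      // chunks in arrival order
    this.bufferIndex = 0;  // index of the first unread chunk
  }

  push(chunk) {
    this.buffer.push(chunk);
  }

  shift() {
    if (this.bufferIndex === this.buffer.length) return undefined;
    const chunk = this.buffer[this.bufferIndex];
    this.buffer[this.bufferIndex++] = null; // release the reference
    // Compact occasionally so consumed slots do not accumulate forever.
    if (this.bufferIndex === this.buffer.length) {
      this.buffer.length = 0;
      this.bufferIndex = 0;
    } else if (this.bufferIndex > 1024) {
      this.buffer.splice(0, this.bufferIndex);
      this.bufferIndex = 0;
    }
    return chunk;
  }
}

// Usage: behaves like Array#shift(), but without moving every element per read.
const q = new ChunkQueue();
q.push(Buffer.from('a'));
q.push(Buffer.from('b'));
console.log(q.shift().toString()); // 'a'
console.log(q.shift().toString()); // 'b'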

benchmark/streams/readable-bigread.js

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@ function main({ n }) {
 
   bench.start();
   for (let k = 0; k < n; ++k) {
-    for (let i = 0; i < 1e4; ++i)
+    for (let i = 0; i < 1e3; ++i)
       s.push(b);
     while (s.read(128));
   }

benchmark/streams/readable-readall.js

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@ function main({ n }) {
 
   bench.start();
   for (let k = 0; k < n; ++k) {
-    for (let i = 0; i < 1e4; ++i)
+    for (let i = 0; i < 1e3; ++i)
       s.push(b);
     while (s.read());
   }

lib/internal/streams/readable.js

Lines changed: 115 additions & 23 deletions
@@ -34,6 +34,7 @@ const {
   SymbolAsyncDispose,
   SymbolAsyncIterator,
   Symbol,
+  TypedArrayPrototypeSet,
 } = primordials;
 
 module.exports = Readable;
@@ -73,6 +74,7 @@ const {
 const { validateObject } = require('internal/validators');
 
 const kState = Symbol('kState');
+const FastBuffer = Buffer[Symbol.species];
 
 const { StringDecoder } = require('string_decoder');
 const from = require('internal/streams/from');
@@ -278,7 +280,8 @@ function ReadableState(options, stream, isDuplex) {
   // A linked list is used to store data chunks instead of an array because the
   // linked list can remove elements from the beginning faster than
   // array.shift().
-  this.buffer = new BufferList();
+  this.buffer = [];
+  this.bufferIndex = 0;
   this.length = 0;
   this.pipes = [];
 
@@ -546,10 +549,15 @@ function addChunk(stream, state, chunk, addToFront) {
   } else {
     // Update the buffer info.
     state.length += (state[kState] & kObjectMode) !== 0 ? 1 : chunk.length;
-    if (addToFront)
-      state.buffer.unshift(chunk);
-    else
+    if (addToFront) {
+      if (state.bufferIndex > 0) {
+        state.buffer[--state.bufferIndex] = chunk;
+      } else {
+        state.buffer.unshift(chunk); // Slow path
+      }
+    } else {
       state.buffer.push(chunk);
+    }
 
     if ((state[kState] & kNeedReadable) !== 0)
       emitReadable(stream);
@@ -564,21 +572,24 @@ Readable.prototype.isPaused = function() {
 
 // Backwards compatibility.
 Readable.prototype.setEncoding = function(enc) {
+  const state = this._readableState;
+
   const decoder = new StringDecoder(enc);
-  this._readableState.decoder = decoder;
+  state.decoder = decoder;
   // If setEncoding(null), decoder.encoding equals utf8.
-  this._readableState.encoding = this._readableState.decoder.encoding;
+  state.encoding = state.decoder.encoding;
 
-  const buffer = this._readableState.buffer;
   // Iterate over current buffer to convert already stored Buffers:
   let content = '';
-  for (const data of buffer) {
+  for (const data of state.buffer.slice(state.bufferIndex)) {
     content += decoder.write(data);
   }
-  buffer.clear();
+  state.buffer.length = 0;
+  state.bufferIndex = 0;
+
   if (content !== '')
-    buffer.push(content);
-  this._readableState.length = content.length;
+    state.buffer.push(content);
+  state.length = content.length;
   return this;
 };
 
@@ -611,7 +622,7 @@ function howMuchToRead(n, state) {
   if (NumberIsNaN(n)) {
     // Only flow one buffer at a time.
     if ((state[kState] & kFlowing) !== 0 && state.length)
-      return state.buffer.first().length;
+      return state.buffer[state.bufferIndex].length;
     return state.length;
   }
   if (n <= state.length)
@@ -1550,20 +1561,101 @@ function fromList(n, state) {
     return null;
 
   let ret;
-  if (state.objectMode)
-    ret = state.buffer.shift();
-  else if (!n || n >= state.length) {
+  if ((state[kState] & kObjectMode) !== 0) {
+    ret = state.buffer[state.bufferIndex++];
+  } else if (!n || n >= state.length) {
     // Read it all, truncate the list.
-    if (state.decoder)
-      ret = state.buffer.join('');
-    else if (state.buffer.length === 1)
-      ret = state.buffer.first();
-    else
-      ret = state.buffer.concat(state.length);
-    state.buffer.clear();
+    if ((state[kState] & kDecoder) !== 0) {
+      ret = ''
+      for (let n = state.bufferIndex; n < state.buffer.length; n++) {
+        ret += state.buffer[n];
+      }
+    } else if (state.buffer.length - state.bufferIndex === 0) {
+      ret = Buffer.alloc(0)
+    } else if (state.buffer.length - state.bufferIndex === 1) {
+      ret = state.buffer[state.bufferIndex];
+    } else {
+      ret = Buffer.allocUnsafe(state.length);
+
+      const idx = state.bufferIndex;
+      const buf = state.buffer;
+      const len = buf.length;
+
+      let i = 0;
+      for (let n = idx; n < len; n++) {
+        TypedArrayPrototypeSet(ret, buf[n], i);
+        i += buf[n].length;
+      }
+    }
+    state.buffer.length = 0;
+    state.bufferIndex = 0;
   } else {
     // read part of list.
-    ret = state.buffer.consume(n, state.decoder);
+
+    const buf = state.buffer;
+    const len = buf.length;
+
+    let idx = state.bufferIndex;
+
+    if (n < buf[idx].length) {
+      // `slice` is the same for buffers and strings.
+      ret = buf[idx].slice(0, n);
+      buf[idx] = buf[idx].slice(n);
+    } else if (n === buf[idx].length) {
+      // First chunk is a perfect match.
+      ret = buf[idx];
+      buf[idx++] = null;
+    } else if ((state[kState] & kDecoder) !== 0) {
+      ret = '';
+      while (idx < state.buffer.length) {
+        const str = buf[idx];
+        if (n > str.length) {
+          ret += str;
+          n -= str.length;
+          buf[idx++] = null;
+        } else {
+          if (n === buf.length) {
+            ret += str;
+            buf[idx++] = null;
+          } else {
+            ret += str.slice(0, n);
+            buf[idx] = str.slice(n);
+          }
+          break;
+        }
+      }
+    } else {
+      ret = Buffer.allocUnsafe(n);
+
+      const retLen = n;
+      while (idx < len) {
+        const data = buf[idx];
+        if (n > data.length) {
+          TypedArrayPrototypeSet(ret, data, retLen - n);
+          n -= data.length;
+          buf[idx++] = null;
+        } else {
+          if (n === data.length) {
+            TypedArrayPrototypeSet(ret, data, retLen - n);
+            buf[idx++] = null;
+          } else {
+            TypedArrayPrototypeSet(ret, new FastBuffer(data.buffer, data.byteOffset, n), retLen - n);
+            buf[idx] = new FastBuffer(data.buffer, data.byteOffset + n);
+          }
+          break;
+        }
+      }
+    }
+
+    if (idx === buf.length) {
+      state.buffer.length = 0;
+      state.bufferIndex = 0
+    } else if (idx > 1024) {
+      state.buffer.splice(0, idx);
+      state.bufferIndex = 0;
+    } else {
+      state.bufferIndex = idx;
+    }
   }
 
   return ret;
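In the new fromList() above, partial reads copy whole chunks into a preallocated Buffer with TypedArrayPrototypeSet and keep the tail of the last chunk as a zero-copy FastBuffer view. Outside of core, the same bookkeeping can be expressed with public Buffer APIs; the following is a rough, illustrative sketch only (readBytes and its chunks argument are made up for this example, not part of the commit).

'use strict';

// Consume exactly n bytes from an array of Buffer chunks, starting at index.
// Returns { value, index }; mirrors the copy-and-keep-the-tail logic of fromList().
function readBytes(chunks, index, n) {
  const ret = Buffer.allocUnsafe(n);
  let offset = 0;
  while (index < chunks.length && offset < n) {
    const data = chunks[index];
    const want = n - offset;
    if (data.length <= want) {
      ret.set(data, offset);               // public analogue of TypedArrayPrototypeSet
      offset += data.length;
      chunks[index++] = null;              // chunk fully consumed
    } else {
      ret.set(data.subarray(0, want), offset);
      chunks[index] = data.subarray(want); // zero-copy tail, like the FastBuffer view
      offset = n;
    }
  }
  return { value: ret, index };
}

// Usage:
const chunks = [Buffer.from('hello'), Buffer.from('world')];
const { value, index } = readBytes(chunks, 0, 7);
console.log(value.toString()); // 'hellowo'
console.log(index);            // 1 (first chunk consumed, second partially sliced)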

test/parallel/test-stream2-readable-from-list.js

Lines changed: 0 additions & 101 deletions
This file was deleted.
