- Version: v10.7.0, v8.11.3
- Platform: Linux yoga 4.17.2-1-ARCH #1 SMP PREEMPT Sat Jun 16 11:08:59 UTC 2018 x86_64 GNU/Linux
- Subsystem: fs, buffer
While trying to resolve Gzemnid memory problems at nodejs/Gzemnid#18, I eventually reduced those to the following testcase. It seems to me that it's not node-lz4's fault, but that something is wrong on the Node.js side.
Testcase:
```js
'use strict';
const fs = require('fs');

fs.writeFileSync('test-small.bin', Buffer.alloc(100));
fs.writeFileSync('test-large.bin', Buffer.alloc(100000));

const maxLength = 4 * 1024 * 1024; // 4 MiB
let built = 0, buf = [], length = 0;

function tick() {
  built++;
  if (built % 1000 === 0) {
    console.log(`RSS [${built}]: ${process.memoryUsage().rss / 1024 / 1024} MiB`);
  }
  const stream = fs.createReadStream('./test-small.bin');
  stream.on('data', function (data) {
    //data = Buffer.from(data); // WARNING: uncommenting this line fixes things somehow
    buf.push(data);
    length += data.length;
    if (length >= maxLength) {
      buf = [];
      length = 0;
    }
  });
  stream.once('end', tock);
}

function tock() {
  fs.readFileSync('./test-large.bin');
  setImmediate(tick);
  /* Alternatively:
  const stream = fs.createReadStream('./test-large.bin');
  stream.on('data', function() {});
  stream.once('end', tick);
  */
}

tick();
```
Adding `data = Buffer.from(data);` fixes things somehow; the problem starts when the exact same chunks from the stream are retained for some time while some larger file reading goes on.
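For reference, one way to probe what that copy changes is to look at the backing store of the chunks the stream hands out: since Buffer is a `Uint8Array` subclass, `.buffer`, `.byteOffset` and `.byteLength` are available on every chunk, and `Buffer.from(data)` copies the bytes instead of retaining a view. This is only a diagnostic sketch, not a claim about the actual cause:

```js
'use strict';
const fs = require('fs');

// Diagnostic sketch: print the size of each chunk versus the size of its
// backing ArrayBuffer. A backing buffer much larger than the chunk would
// mean that retaining the chunk also retains the larger allocation.
const stream = fs.createReadStream('./test-small.bin');
stream.on('data', (data) => {
  console.log(
    `chunk: ${data.length} B, ` +
    `backing ArrayBuffer: ${data.buffer.byteLength} B, ` +
    `byteOffset: ${data.byteOffset}`
  );
});
```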
Triggering GC manually does not help; this looks like a memory leak.
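A sketch of what the manual GC check can look like, assuming the script is started with `node --expose-gc` so that `global.gc()` is available:

```js
// Manual GC check: global.gc() only exists when the script is started
// with `node --expose-gc`.
if (typeof global.gc === 'function') {
  global.gc();
  console.log(`RSS after manual gc: ${process.memoryUsage().rss / 1024 / 1024} MiB`);
}
```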
All that memory is allocated through `node::ArrayBufferAllocator::Allocate`.
/cc @addaleax @nodejs/buffer