I'm writing a large file with node.js using a writable stream:

var fs = require('fs');
var stream = fs.createWriteStream('someFile.txt', { flags: 'w' });

var lines;
while (lines = getLines()) {
    for (var i = 0; i < lines.length; i++) {
        stream.write(lines[i]);
    }
}

Is this approach safe without using the drain event? If it is not, what is the pattern for writing arbitrarily large data to a file?
I found streams to be a poor-performing way to deal with large files, because you cannot set an adequate input buffer size (at least I'm not aware of a good way to do it). This is what I do instead:
var fs = require('fs');
var i = fs.openSync('input.txt', 'r');
var o = fs.openSync('output.txt', 'w');

var buf = Buffer.alloc(1024 * 1024), len, prev = '';

while (len = fs.readSync(i, buf, 0, buf.length)) {
    var a = (prev + buf.toString('ascii', 0, len)).split('\n');
    // If the read filled the buffer, the last element is probably a
    // partial line; carry it over into the next iteration.
    prev = len === buf.length ? '\n' + a.splice(a.length - 1)[0] : '';
    var out = '';
    a.forEach(function (line) {
        if (!line)
            return;
        // do something with your line here
        out += line + '\n';
    });
    var bout = Buffer.from(out, 'ascii');
    fs.writeSync(o, bout, 0, bout.length);
}

fs.closeSync(o);
fs.closeSync(i);
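For what it's worth, on current Node versions you can tune the read buffer size via the highWaterMark option, which addresses the input-buffer-size concern above. A minimal sketch (the file names are placeholders):

var fs = require('fs');

// highWaterMark sets the internal buffer size (default 64 KiB for file streams)
var input = fs.createReadStream('input.txt', { highWaterMark: 1024 * 1024 });
var output = fs.createWriteStream('output.txt');

input.pipe(output); // pipe handles back-pressure for you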
That's how I finally did it. The idea is to create a readable stream implementing the ReadStream interface and then use the pipe() method to pipe data into the writable stream.
var fs = require('fs');
var writeStream = fs.createWriteStream('someFile.txt', { flags : 'w' });
var readStream = new MyReadStream();
readStream.pipe(writeStream);
writeStream.on('close', function () {
    console.log('All done!');
});
An example of such a MyReadStream class can be taken from mongoose's QueryStream.
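In case it helps, here is a minimal sketch of what such a MyReadStream might look like using the streams2 Readable base class (the docs array of strings here is a stand-in for whatever your real data source is):

var stream = require('stream');
var util = require('util');

function MyReadStream(docs) {
    stream.Readable.call(this);
    this.docs = docs; // placeholder data source
    this.i = 0;
}
util.inherits(MyReadStream, stream.Readable);

// _read is called whenever the consumer is ready for more data
MyReadStream.prototype._read = function () {
    if (this.i < this.docs.length) {
        this.push(this.docs[this.i++] + '\n');
    } else {
        this.push(null); // end of stream
    }
};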
[Edit] The updated Node.js writable.write(...) API docs say:
[The] return value is strictly advisory. You MAY continue to write, even if it returns false. However, writes will be buffered in memory, so it is best not to do this excessively. Instead, wait for the drain event before writing more data.
[Original] From the stream.write(...) documentation (emphasis mine):

Returns true if the string has been flushed to the kernel buffer. Returns false to indicate that the kernel buffer is full, and the data will be sent out in the future.
I interpret this to mean that the "write" function returns true if the given string was immediately written to the underlying OS buffer, or false if it was not yet written but will be written by the write function (e.g. was presumably buffered for you by the WriteStream), so that you do not have to call "write" again.
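In practice that suggests a pattern like the following (a sketch with a hypothetical writeMany helper): keep writing while write() returns true, and when it returns false, stop and resume on drain.

var fs = require('fs');
var stream = fs.createWriteStream('someFile.txt', { flags: 'w' });

// Hypothetical helper: writes chunks in order, pausing on back-pressure.
function writeMany(chunks, i) {
    i = i || 0;
    while (i < chunks.length) {
        if (!stream.write(chunks[i++])) {
            // buffer is full; chunks[i - 1] was still queued, so just wait
            stream.once('drain', function () { writeMany(chunks, i); });
            return;
        }
    }
    stream.end();
}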
If you do not happen to have an input stream, you cannot easily use pipe. None of the above worked for me; the drain event doesn't fire. Solved as follows (based on Tyler's answer):
var fs = require('fs');
var wstream = fs.createWriteStream('someFile.txt');
var lines = []; // some very large array
var i = 0;

function write() {
    if (i < lines.length) {
        // pass a callback so we only move on once this chunk is flushed
        wstream.write(lines[i], function (err) {
            if (err) {
                console.log(err);
            } else {
                i++;
                write();
            }
        });
    } else {
        wstream.end();
        console.log("done");
    }
}
write();
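On newer Node versions (11.13+), the same idea can be written more directly with events.once and async/await, if that's available to you:

const { once } = require('events');

async function writeLines(wstream, lines) {
    for (const line of lines) {
        if (!wstream.write(line)) {
            await once(wstream, 'drain'); // wait out the back-pressure
        }
    }
    wstream.end();
}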
Several suggested answers to this question have missed the point about streams altogether.
This module can help: https://www.npmjs.org/package/JSONStream
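Something along these lines, if I remember the JSONStream API correctly (cursorStream here is a stand-in for your object-mode source):

var JSONStream = require('JSONStream');
var fs = require('fs');

cursorStream                          // object-mode readable, e.g. a MongoDB cursor stream
    .pipe(JSONStream.stringify())     // objects -> a JSON array as text
    .pipe(fs.createWriteStream('out.json'));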
However, let's suppose the situation as described and write the code ourselves. You are reading from MongoDB as a stream, with objectMode = true by default.
This will lead to issues if you try to stream directly to file, with something like an "Invalid non-string/buffer chunk" error.
The solution to this type of problem is very simple: just put another Transform in between the readable and the writable to adapt the object readable into a string writable appropriately.
Sample Code Solution:
var fs = require('fs'),
    stream = require('stream'),
    writeStream = fs.createWriteStream('./out' + process.pid, { flags: 'w', encoding: 'utf-8' }),
    stringifier = new stream.Transform({ writableObjectMode: true });

// turn each incoming object into a line of JSON text
stringifier._transform = function (data, encoding, done) {
    this.push(JSON.stringify(data));
    this.push('\n');
    done();
};

rowFeedDao.getRowFeedsStream(merchantId, jobId)
    .pipe(stringifier)
    .pipe(writeStream)
    .on('error', function (err) {
        // handle error condition
    });
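As a side note, on Node 10+ you might prefer stream.pipeline here, since a plain pipe chain only surfaces errors on the stream you attach the handler to, while pipeline propagates errors from all three streams and cleans up:

var pipeline = require('stream').pipeline;

pipeline(
    rowFeedDao.getRowFeedsStream(merchantId, jobId),
    stringifier,
    writeStream,
    function (err) {
        if (err) {
            console.error('pipeline failed', err);
        }
    }
);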
The idea behind drain is that you would use it to test here:
var fs = require('fs');
var stream = fs.createWriteStream('someFile.txt', {flags: 'w'});

var lines;
while (lines = getLines()) {
    for (var i = 0; i < lines.length; i++) {
        stream.write(lines[i]); //<-- the place to test
    }
}
which you're not. So you would need to rearchitect to make it "reentrant".
var fs = require('fs');
var stream = fs.createWriteStream('someFile.txt', {flags: 'w'});

var lines;
while (lines = getLines()) {
    for (var i = 0; i < lines.length; i++) {
        var written = stream.write(lines[i]); //<-- the place to test
        if (!written) {
            //do something here to wait till you can safely write again
            //this means prepare a buffer and wait till you can come back to finish
            // lines[i] -> remainder
        }
    }
}
However, does this mean that you need to keep buffering getLines as well while you wait?
var fs = require('fs');
var stream = fs.createWriteStream('someFile.txt', {flags: 'w'});

var lines,
    buffer = {
        remainingLines: []
    };

while (lines = getLines()) {
    for (var i = 0; i < lines.length; i++) {
        var written = stream.write(lines[i]); //<-- the place to test
        if (!written) {
            //wait till you can safely write again:
            //stash the rest of this batch for later
            //(write() still queued lines[i], so the remainder starts at i + 1)
            buffer.remainingLines = lines.slice(i + 1);
            break;
            //notice there's no way to re-run this once we leave here.
        }
    }
}
stream.on('drain', function () {
    if (buffer.remainingLines.length) {
        for (var i = 0; i < buffer.remainingLines.length; i++) {
            var written = stream.write(buffer.remainingLines[i]); //<-- the place to test
            if (!written) {
                //same problem again: write() still queued this chunk,
                //so the remainder starts at i + 1
                buffer.remainingLines = buffer.remainingLines.slice(i + 1);
                return; //wait for the next drain
            }
        }
        buffer.remainingLines = [];
    }
});
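For completeness, here is one way to close that loop (a sketch, assuming getLines() returns an array per batch and something falsy when exhausted): drive all writes from a single function that re-arms itself on drain, so no batch is fetched until the previous one has been fully accepted.

var fs = require('fs');
var stream = fs.createWriteStream('someFile.txt', { flags: 'w' });
var queue = [];

function flush() {
    for (;;) {
        while (queue.length > 0) {
            if (!stream.write(queue.shift())) {
                stream.once('drain', flush); // resume exactly here later
                return;
            }
        }
        var lines = getLines();
        if (!lines || !lines.length) {
            stream.end();
            return;
        }
        queue = lines;
    }
}

flush();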