I've got a very long array of objects, about 100,000 items in size. Just before I write it to a file, I pass the data into JSON.stringify and I get this:
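For reference, the write is essentially doing this (a minimal sketch; buildRecords() is just a stand-in for however the array is actually produced):

var fileSystem = require( "fs" );

// Hypothetical sketch of the failing approach: JSON.stringify() has to build
// the entire JSON string in memory before anything is written to disk.
var records = buildRecords(); // placeholder for the ~100,000-item array

fileSystem.writeFileSync( __dirname + "/data.json", JSON.stringify( records ) );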
I find JSONStream to be a reliable alternative to the native JSON.stringify that works well with large objects. For example:
var fileSystem = require( "fs" );
var JSONStream = require( "JSONStream" );

var records = [
    { id: 1, name: "Terminator" },
    { id: 2, name: "Predator" },
    { id: 3, name: "True Lies" },
    { id: 4, name: "Running Man" },
    { id: 5, name: "Twins" }
    // .... hundreds of thousands of records ....
];

// JSONStream.stringify() returns a transform stream that serializes the
// values written to it as one JSON array, emitted piece by piece.
var transformStream = JSONStream.stringify();
var outputStream = fileSystem.createWriteStream( __dirname + "/data.json" );

// Pipe the serialized output straight to the file so the full JSON string
// never has to exist in memory at once.
transformStream.pipe( outputStream );

// Write each record individually, then end the transform stream so it can
// emit the closing "]" of the array.
records.forEach( transformStream.write );
transformStream.end();

// When the file write stream has flushed everything to disk, we're done.
outputStream.on(
    "finish",
    function handleFinish() {
        console.log( "Done" );
    }
);
I took the sample code from here.
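If you later need to read the file back without loading it all at once, JSONStream can also parse it as a stream. Here's a minimal sketch, assuming data.json holds a single top-level array like the one written above:

var fileSystem = require( "fs" );
var JSONStream = require( "JSONStream" );

// Stream the file back in and emit one "data" event per array element,
// so the whole array never has to be held in memory as a single string.
fileSystem.createReadStream( __dirname + "/data.json" )
    .pipe( JSONStream.parse( "*" ) ) // "*" matches each element of the root array
    .on( "data", function handleRecord( record ) {
        console.log( record.id, record.name );
    })
    .on( "end", function handleEnd() {
        console.log( "All records read" );
    });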