Assume we have a program like this:
// imagine string1 through string1000 are very long strings that take a while to write to the file system
var arr = ["string1", "string2", /* ... */ "string1000"];
You can synchronize access to the file using a read/write lock for Node; see the following example, and the rwlock documentation for more detail.
var fs = require('fs');
var ReadWriteLock = require('rwlock');

var lock = new ReadWriteLock();

// fileName and addToFile are placeholders for your path and data
lock.writeLock(function (release) {
    fs.appendFile(fileName, addToFile, function (err) {
        if (err)
            console.log("write error"); // logging error message
        else
            console.log("write ok");
        release(); // unlock
    });
});
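To serialize the 1000 appends from the question, you could request the write lock once per string. A minimal sketch, reusing arr and fileName from above and assuming the lock hands out queued write locks in the order they are requested:
arr.forEach(function (str) {
    lock.writeLock(function (release) {
        // each append starts only after the previous holder has called release()
        fs.appendFile(fileName, str + '\n', function (err) {
            if (err) console.log("write error");
            release();
        });
    });
});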
The docs say:
Note that it is unsafe to use fs.write multiple times on the same file without waiting for the callback. For this scenario, fs.createWriteStream is strongly recommended.
Using a stream works because a writable stream guarantees that chunks are flushed to the file in the same order they were written to it.
var fs = require('fs');

var stream = fs.createWriteStream("./same/path/file.txt");
stream.on('error', console.error);

arr.forEach((str) => {
    stream.write(str + '\n');
});

stream.end();
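If you also need to know when everything has actually been flushed to the file, you can listen for the stream's 'finish' event, which fires after end() once all queued data has been handed off:
stream.on('finish', () => {
    console.log('all strings written to file');
});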
Another way, if you'd rather keep using fs.writeFile, is to chain promises so the writes happen in sequence.
function writeToFilePromise(str) {
    return new Promise((resolve, reject) => {
        fs.writeFile("./same/path/file.txt", str, {flag: "a"}, (err) => {
            if (err) return reject(err);
            resolve();
        });
    });
}
// for every string,
// write it to the file,
// then write the next one once that one is finished and so on
arr.reduce((chain, str) => {
    return chain.then(() => writeToFilePromise(str));
}, Promise.resolve());
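Since reduce() returns the last promise in the chain, you can keep a reference to it to detect completion or a failed write. A minimal sketch of the same chain with a catch attached:
var allWrites = arr.reduce((chain, str) => {
    return chain.then(() => writeToFilePromise(str));
}, Promise.resolve());

allWrites
    .then(() => console.log("all strings appended in order"))
    .catch((err) => console.error("a write failed:", err));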