Asynchronously reading and caching multiple files in nodejs


By the time your readFile callback executes, the for loop has already finished, so i equals files.length and files[i] is undefined. To fix this, you need to capture the loop variable in a closure. The simplest way is to move the readFile call into a helper function and call that from the loop:

var fs = require('fs');
var cache = {};

function read(file) {
    // file is bound per call, so the callback caches under the right name
    fs.readFile(file, 'utf8', function (error, data) {
        if (error) return console.error(error);
        cache[file] = data;
    });
}

for(var i = 0; i < files.length; i++){
    read(files[i]);
}
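
If your Node version supports ES2015, declaring the loop variable with let avoids the helper function entirely, since let gives each iteration its own binding. A minimal sketch, assuming the same files and cache as above:

for (let i = 0; i < files.length; i++) {
    // let creates a fresh i for every iteration, so each callback
    // closes over the index it was started with
    fs.readFile(files[i], 'utf8', function (error, data) {
        if (error) return console.error(error);
        cache[files[i]] = data;
    });
}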

For finer control over execution (ordering, concurrency limits, a single completion callback), you might want to look into the async library:

var fs = require('fs');
var async = require('async');

function readAsync(file, callback) {
    fs.readFile(file, 'utf8', callback);
}

async.map(files, readAsync, function(err, results) {
    // results = ['file 1 content', 'file 2 content', ...]
});
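
If the goal is still to populate a cache keyed by filename, the results can be zipped back with the input array once everything has loaded. A sketch, assuming the same files array and the readAsync helper above:

var cache = {};

async.map(files, readAsync, function (err, results) {
    if (err) return console.error(err);
    // async.map preserves input order, so results[i] belongs to files[i]
    files.forEach(function (file, i) {
        cache[file] = results[i];
    });
    // cache is fully populated here
});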

Edit: the async example now uses a helper function.

The existing answer didn't work for me. I found an npm package that did the job: https://www.npmjs.com/package/read-multiple-files. After running npm install read-multiple-files at the command line, here's the code I used:

var readMultipleFiles = require('read-multiple-files');

var files = ['1.html', '2.html', '3.html'];

console.log("\n");

readMultipleFiles(files, 'utf8', function(err, inputFiles) {
  if (err) {
    console.log("Read Error: " + err);
    return;
  }

  // inputFiles holds the file contents in the same order as files
  var fileOne = inputFiles[0];
  var fileTwo = inputFiles[1];
  // ...

  console.log(fileOne);
  console.log(fileTwo);
});
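
For what it's worth, on newer Node versions (roughly 10 and later) the same result is possible without a third-party package by combining fs.promises.readFile with Promise.all. A minimal sketch, assuming the same files array:

var fs = require('fs').promises;

var files = ['1.html', '2.html', '3.html'];

Promise.all(files.map(function (file) {
    return fs.readFile(file, 'utf8');
})).then(function (contents) {
    // contents[i] is the text of files[i], in the same order
    contents.forEach(function (text) {
        console.log(text);
    });
}).catch(function (err) {
    console.log("Read Error: " + err);
});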