Download large file with node.js avoiding high memory consumption

梦毁少年i 2020-12-12 15:35

I'm trying to create a file downloader as a background service, but when a large file is scheduled it's first put in memory and then, at the end of the download, the file is written to disk.

6 Answers
  • 2020-12-12 15:38

    Does the request package work for your use case?

    It lets you do things like this:

    request(downloadurl).pipe(fs.createWriteStream(downloadtohere))
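
    For completeness, a self-contained sketch of the same approach; downloadurl and downloadtohere are placeholders, and request has to be installed first (npm install request):

    var fs = require('fs');
    var request = require('request');

    request(downloadurl)
        .on('error', function (err) {
            console.error('download failed:', err);
        })
        .pipe(fs.createWriteStream(downloadtohere))
        .on('finish', function () {
            console.log('saved to ' + downloadtohere);
        });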
    
  • 2020-12-12 15:43

    Instead of holding the content in memory in the "data" event listener, you should write it to the file in append mode (see the sketch below).

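    A minimal sketch of that idea, assuming the file is fetched with Node's core http module; the URL and file name are placeholders:

    var fs = require('fs');
    var http = require('http');

    // open the destination in append mode so each chunk is added at the end
    var out = fs.createWriteStream('big.file', {flags: 'a'});

    http.get('http://example.com/big.file', function (response) {
        response.on('data', function (chunk) {
            out.write(chunk);  // write the chunk out instead of keeping it in memory
        });
        response.on('end', function () {
            out.end();
            console.log('download finished');
        });
    });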
  • 2020-12-12 15:49

    Use streams, as Carter Cole suggested. Here is a more complete example:

    var inspect = require('eyespect').inspector();
    var request = require('request');
    var filed = require('filed');
    var temp = require('temp');

    var downloadURL = 'http://upload.wikimedia.org/wikipedia/commons/e/ec/Hazard_Creek_Kayaker.JPG';
    var downloadPath = temp.path({prefix: 'singlePageRaw', suffix: '.jpg'});

    // filed gives a writable file stream, so piping the request into it
    // streams the response straight to disk instead of buffering it
    var downloadFile = filed(downloadPath);
    var r = request(downloadURL).pipe(downloadFile);

    r.on('data', function (data) {
      inspect('binary data received');
    });

    downloadFile.on('end', function () {
      inspect(downloadPath, 'file downloaded to path');
    });

    downloadFile.on('error', function (err) {
      inspect(err, 'error downloading file');
    });
    

    You may need to install the modules first, which you can do with: npm install filed request eyespect temp

  • 2020-12-12 15:54

    I changed the callback to:

    request.addListener('response', function (response) {
        // open the destination file in append mode
        var downloadfile = fs.createWriteStream(filename, {flags: 'a'});
        console.log("File size " + filename + ": " + response.headers['content-length'] + " bytes.");
        response.addListener('data', function (chunk) {
            // write each chunk to disk as it arrives instead of buffering it
            dlprogress += chunk.length;
            downloadfile.write(chunk);
        });
        response.addListener("end", function () {
            downloadfile.end();
            console.log("Finished downloading " + filename);
        });
    });
    

    This worked perfectly.

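    For comparison, a sketch of the same chunk-to-disk idea using the newer stream.pipeline helper from Node core; the URL and file name are placeholders:

    var fs = require('fs');
    var http = require('http');
    var pipeline = require('stream').pipeline;

    http.get('http://example.com/big.file', function (response) {
        // pipeline streams the response straight to disk and cleans up on error
        pipeline(response, fs.createWriteStream('big.file'), function (err) {
            if (err) console.error('download failed:', err);
            else console.log('download finished');
        });
    });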
  • 2020-12-12 15:56

    When downloading a large file, use fs.write and not fs.writeFile, since writeFile would overwrite the previous content on every call.

    function downloadfile(res) {
        var size = 0;
        var requestserver = http.request(options, function (r) {
            console.log('STATUS: ' + r.statusCode);
            console.log('HEADERS: ' + JSON.stringify(r.headers));

            var fd = fs.openSync('sai.tar.gz', 'w');

            r.on('data', function (chunk) {
                size += chunk.length;
                console.log(size + ' bytes received');
                sendstatus(res, size);
                // position null means "write at the current file position"
                fs.write(fd, chunk, 0, chunk.length, null, function (err, written) {
                    if (err) console.error(err);
                });
            });
            r.on('end', function () {
                console.log('\nended from server');
                fs.closeSync(fd);
                sendendstatus(res);
            });
        });
        requestserver.end();
    }
    
  • 2020-12-12 15:57

    Take a look at http-request:

    var http = require('http-request');  // npm install http-request

    // shorthand syntax, buffered response
    http.get('http://localhost/get', function (err, res) {
        if (err) throw err;
        console.log(res.code, res.headers, res.buffer.toString());
    });
    
    // save the response to 'myfile.bin' with a progress callback
    http.get({
        url: 'http://localhost/get',
        progress: function (current, total) {
            console.log('downloaded %d bytes from %d', current, total);
        }
    }, 'myfile.bin', function (err, res) {
        if (err) throw err;
        console.log(res.code, res.headers, res.file);
    });
    