nodejs synchronization read large file line by line?

后端 未结 5 1131
没有蜡笔的小新
没有蜡笔的小新 2021-01-02 07:54

I have a large file (UTF-8). I know fs.createReadStream can create a stream to read a large file, but that is not synchronous. So I tried to use fs.readSync, but it reads fixed-size byte chunks, not lines.

5条回答
  •  难免孤独
    2021-01-02 08:19

    I built a simpler version of JB Kohn's answer that uses split() on the buffer. It works on the larger files I tried.

    /*
     * Synchronously call fn(text, lineNum) on each line read from file descriptor fd.
     *
     * fd - file descriptor opened for reading (fs.openSync(path, 'r')).
     * fn - callback invoked as fn(line, lineNum); lineNum is 0-based and the
     *      line text never contains the trailing '\n'.
     *
     * Leftover bytes (not a decoded string) are carried between reads, so a
     * multi-byte UTF-8 character that straddles a 64 KiB read boundary is
     * decoded correctly instead of becoming U+FFFD replacement characters.
     */
    function forEachLine (fd, fn) {
        var bufSize = 64 * 1024;
        var buf = Buffer.alloc(bufSize);    // alloc (zero-filled); new Buffer() is deprecated/unsafe
        var leftOver = Buffer.alloc(0);     // undecoded bytes carried from the previous read
        var lineNum = 0;
        var n, chunk, lastNl, lines, i;

        while ((n = fs.readSync(fd, buf, 0, bufSize, null)) !== 0) {
            // Prepend the carried-over bytes before decoding anything.
            // Buffer.concat copies, so reusing `buf` next iteration is safe.
            chunk = Buffer.concat([leftOver, buf.subarray(0, n)]);

            // 0x0A ('\n') never appears inside a UTF-8 multi-byte sequence,
            // so splitting on the raw byte is always character-safe.
            lastNl = chunk.lastIndexOf(0x0a);
            if (lastNl === -1) {            // no complete line in this chunk yet
                leftOver = chunk;
                continue;
            }

            // Decode only the complete lines; keep the tail bytes for later.
            lines = chunk.toString('utf8', 0, lastNl).split('\n');
            for (i = 0; i < lines.length; i++) {
                fn(lines[i], lineNum);
                lineNum++;
            }
            leftOver = chunk.subarray(lastNl + 1);
        }
        if (leftOver.length > 0) {          // final line without a trailing newline
            fn(leftOver.toString('utf8'), lineNum);
        }
    }
    

提交回复
热议问题