Await an iterative function without delimiter in JS

失恋的感觉 2020-12-03 20:49

I've got a directory with an unknown number of subfolders. Each subfolder may or may not have further subfolders. I am iterating through them using a recursive function. Due …

2 Answers
  •  眼角桃花
    2020-12-03 21:02

    I would suggest building smaller functions with isolated concerns. Start with a files function that simply returns all files and the files of all sub-directories -

    const { readdir, stat } =
      require ("fs") .promises
    
    const { join } =
      require ("path")
    
    const files = async (path = ".") =>
      (await stat (path)) .isDirectory ()
        ? Promise
            .all
              ( (await readdir (path))
                  .map (f => files (join (path, f)))
              )
            .then
              ( results =>
                 [] .concat (...results)
              )
        : [ path ]
    
    files () .then (console.log, console.error)
    
    // [ './.main.js'
    // , './node_modules/anotherpackage/README.md'
    // , './node_modules/anotherpackage/package.json'
    // , './node_modules/anotherpackage/index.js'
    // , './node_modules/somepackage/.npmignore'
    // , './node_modules/somepackage/LICENSE'
    // , './node_modules/somepackage/README.md'
    // , './node_modules/somepackage/package.json'
    // , './node_modules/somepackage/index.js'
    // , './node_modules/somepackage/test/test.js'
    // , './package.json'
    // ]
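
    As a side note, on Node 11+ you could flatten the nested results with Array.prototype.flat instead of the [] .concat (...results) trick. A minimal sketch of that variant (same behavior, only the flattening step differs) -

    // variant of files using .flat () to merge the per-directory results
    const files = async (path = ".") =>
      (await stat (path)) .isDirectory ()
        ? Promise
            .all
              ( (await readdir (path))
                  .map (f => files (join (path, f)))
              )
            .then (results => results .flat ())
        : [ path ]
    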
    

    Then make a search function which depends on files and adds the capability to filter results -

    const { basename } =
      require ("path")
    
    const search = async (query, path = ".") =>
      (await files (path))
        .filter (x => basename (x) === query)
    
    search ("package.json", ".")
      .then (console.log, console.error)
    
    // [ './node_modules/anotherpackage/package.json'
    // , './node_modules/somepackage/package.json'
    // , './package.json'
    // ]
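
    The query is an ordinary argument, so the same function works for any file name. Based on the directory listing above, searching for README.md would yield something like -

    search ("README.md", ".")
      .then (console.log, console.error)
    
    // [ './node_modules/anotherpackage/README.md'
    // , './node_modules/somepackage/README.md'
    // ]
    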
    

    Then make your readPackages function which depends on search and adds the capability to read/parse the packages -

    const { readFile } =
      require ("fs") .promises
    
    const readPackages = async (path = ".") =>
      Promise
        .all
          ( (await search ("package.json", path))
              .map (pkg => readFile (pkg))
          )
        .then
          ( buffers =>
              buffers .map (b => JSON .parse (String (b)))
          )
    
    readPackages ('.')
      .then (console.log, console.error)
    
    // [ { ...parsed contents of './node_modules/anotherpackage/package.json' }
    // , { ...parsed contents of './node_modules/somepackage/package.json' }
    // , { ...parsed contents of './package.json' }
    // ]
    

    Finally, notice how jsonTable is no longer a global. Instead, all data is nicely contained and flows through our sequence of promises.


    If you'd like to transform the packages as you're reading them, you can make transform a parameter of the readPackages function. This keeps it generic and allows you to read package contents in a user-specified way -

    const readPackages = async (transform, path = ".") =>
      Promise
        .all
          ( (await search ("package.json", path))
              .map (pkg => readFile (pkg))
          )
        .then
          ( buffers =>
              buffers .map (b => transform (JSON .parse (String (b))))
          )
    
    readPackages
      ( ({ name }) => ({ name }) 
      , '.'
      )
      .then (console.log, console.error)
    
    // [ { name: 'anotherpackage' }
    // , { name: 'somepackage' }
    // , { name: 'mypackage' }
    // ]
    

    Or get name, version, and license -

    readPackages
      ( ({ name, version, license = "None" }) =>
          ({ name, version, license }) 
      , '.'
      )
      .then (console.log, console.error)
    
    // [ { name: 'anotherpackage', version: '1.0.0', license: 'None' }
    // , { name: 'somepackage', version: '3.2.1', license: 'MIT' }
    // , { name: 'mypackage', version: '1.2.3', license: 'BSD-3-Clause' }
    // ]
    

    Now in these simplified programs, we start to see some patterns emerging. To make our intentions more clear and avoid repeating ourselves, we design a reusable module -

    // Parallel wraps a promise of an array and exposes promise-aware
    // map / filter / flatMap; mapped operations run concurrently via Promise.all
    const Parallel = p =>
      ( { map: async f =>
            Promise .all ((await p) .map (x => f (x)))
        , filter: async f =>
            (await p) .filter (x => f (x))
        , flatMap: async f =>
            Promise .all ((await p) .map (x => f (x))) .then (ys => [] .concat (...ys))
        , // ...
        }
      )
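
    To see how the module behaves on its own, here is a small, hypothetical usage example with a plain promise of an array -

    Parallel (Promise .resolve ([ 1, 2, 3 ]))
      .map (x => x * 10)
      .then (console.log, console.error)
    
    // [ 10, 20, 30 ]
    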
    

    Now our files function is a lot nicer -

    const files = async (path = ".") =>
      (await stat (path)) .isDirectory ()
        ? Parallel (readdir (path))
            .flatMap (f => files (join (path, f)))
        : [ path ]
    

    Our search function is cleaned up a bit too -

    const search = async (query, path = ".") =>
      Parallel (files (path))
        .filter (x => basename (x) === query)
    

    Finally, readPackages -

    const readPackages = async (f, path = ".") =>
      Parallel (search ("package.json", path))
        .map (readFile)
        .then
          ( buffers =>
              buffers .map (b => f (JSON .parse (String (b))))
          )
    

    The behavior of each function is identical to the original implementations, only now we have even more generic functions which can be reused in other areas of our program.

    In a related Q&A, we use the Parallel module to implement a dirs function which recursively lists all directories at a given path.
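
    That dirs function is not reproduced here, but a minimal sketch of how it might look with the same Parallel module (the exact implementation in the related Q&A may differ) -

    // collect the given directory and all of its nested sub-directories
    const dirs = async (path = ".") =>
      (await stat (path)) .isDirectory ()
        ? Parallel (readdir (path))
            .flatMap (f => dirs (join (path, f)))
            .then (results => [ path, ...results ])
        : []
    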
