What is the most efficient way to iterate over directories?

Time: 2017-06-18 20:52:01

Tags: javascript asynchronous promise

Suppose I have a directory foo with several subdirectories. Each of these subdirectories contains between 0 and 5 files of variable length that I want to process. My initial code is as follows:

    pool.query(`
      SET SEARCH_PATH TO public, os_local;
    `).then(() => fs.readdirSync(srcpath)
      .filter(file => fs.lstatSync(path.join(srcpath, file)).isDirectory())
      .map(dir => {
        fs.access(`${srcpath + dir}/${dir}_Building.shp`, fs.constants.R_OK, (err) => {
          if (!err) {
            openShapeFile(`${srcpath + dir}/${dir}_Building.shp`)
              .then((source) => source.read().then(function dbWrite (result) {
                if (result.done) {
                  console.log(`done ${dir}`)
                } else {
                  const query = `INSERT INTO os_local.buildings(geometry,
                    id,
                    featcode,
                    version)
                    VALUES(os_local.ST_GeomFromGeoJSON($1),
                    $2,
                    $3,
                    $4) ON CONFLICT (id) DO UPDATE SET
                      featcode=$3,
                      geometry=os_local.ST_GeomFromGeoJSON($1),
                      version=$4;`
                  // geoJson and version are defined elsewhere (not shown)
                  return pool.connect().then(client => {
                    return client.query(query, [geoJson.split('"[[').join('[[').split(']]"').join(']]'),
                      result.value.properties.ID,
                      result.value.properties.FEATCODE,
                      version
                    ]).then(() => {
                      client.release()
                      return source.read().then(dbWrite)
                    }).catch((err) => {
                      console.log(err,
                        query,
                        geoJson.split('"[[').join('[[').split(']]"').join(']]'),
                        result.value.properties.ID,
                        result.value.properties.FEATCODE,
                        version
                      )
                      client.release()
                      return source.read().then(dbWrite)
                    })
                  })
                }
              })).catch(err => console.log('No Buildings', err))
          }
        })

        fs.access(`${srcpath + dir}/${dir}_ImportantBuilding.shp`, fs.constants.R_OK, (err) => {
          // read file one line at a time
          // spin up a connection in pg.pool, insert data
        })

        fs.access(`${srcpath + dir}/${dir}_Road.shp`, fs.constants.R_OK, (err) => {
          // read file one line at a time
          // spin up a connection in pg.pool, insert data
        })

        fs.access(`${srcpath + dir}/${dir}_Glasshouse.shp`, fs.constants.R_OK, (err) => {
          // read file one line at a time
          // spin up a connection in pg.pool, insert data
        })

        fs.access(`${srcpath + dir}/${dir}_RailwayStation.shp`, fs.constants.R_OK, (err) => {
          // read file one line at a time
          // spin up a connection in pg.pool, insert data
        })
      }))

This mostly works, but it ends up waiting for the longest file in each subdirectory to be processed in full, so in practice there is only ever one open connection to the database.

Is there a way to restructure this to make better use of my computing resources, while limiting the number of active Postgres connections and making the code wait until a connection becomes available? (I have set it to 20 in the pg poolConfig for node-postgres.)
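For reference, the pool is created roughly like this (a sketch; other connection options omitted):

    const { Pool } = require('pg');

    // poolConfig caps the pool at 20 simultaneous connections
    const pool = new Pool({ max: 20 });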

2 answers:

Answer 0 (score: -1):

If you need the files to be processed sequentially within a certain amount of time, you can use streams, timers (for scheduling) and process.nextTick(). There is a great manual for understanding streams in Node.js.
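For example, a minimal sketch of reading one file line by line with a paused stream, yielding between lines with process.nextTick() — processFileByLine and the promise-returning handleLine handler are illustrative names, not code from the question:

    const fs = require('fs');
    const readline = require('readline');

    // Read one file line by line, pausing the stream while each
    // promise-returning handler runs and resuming on the next tick.
    function processFileByLine(filePath, handleLine) {
        return new Promise((resolve, reject) => {
            const input = fs.createReadStream(filePath);
            input.on('error', reject);

            const rl = readline.createInterface({ input });
            rl.on('line', (line) => {
                // pause() is advisory: lines already buffered may still be
                // emitted, but it throttles further reads from the file.
                rl.pause();
                handleLine(line)
                    .then(() => process.nextTick(() => rl.resume()))
                    .catch(reject);
            });
            rl.on('close', resolve);
        });
    }

Here handleLine would check out a client from pg.pool and run the INSERT, mirroring the per-line inserts in the question.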

Answer 1 (score: -2):

Here is an example of using a generator to fetch the directory contents. You can start fetching the first few files right away, and then process the files in parallel using asynchronous code.

    // Dependencies
    const fs = require('fs');
    const path = require('path');

    // The generator function (note the asterisk)
    function* getFilesInDirectory(fullPath, recursive = false) {
        // Convert file names to full paths
        let contents = fs.readdirSync(fullPath).map(file => {
            return path.join(fullPath, file);
        });

        for (let i = 0; i < contents.length; i++) {
            const childPath = contents[i];
            let stats = fs.statSync(childPath);
            if (stats.isFile()) {
                yield childPath;
            } else if (stats.isDirectory() && recursive) {
                yield* getFilesInDirectory(childPath, true);
            }
        }
    }

Usage:

    function handleResults(results) {
        ... // Returns a promise
    }

    function processFile(file) {
        ... // Returns a promise
    }

    var files = getFilesInDirectory(__dirname, true);
    var result = files.next();
    var promises = [];
    while (!result.done) {
        console.log(result.value);
        // Process files in parallel
        var promise = processFile(result.value).then(handleResults);
        promises.push(promise);
        result = files.next();
    }

    Promise.all(promises).then(() => {
        console.log('done');
    });
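Note that this loop starts every file at once, which by itself will not respect the connection limit from the question. A minimal sketch of capping concurrency with the same generator — processAll and the worker pattern are additions, assuming processFile and handleResults return promises as above:

    // N workers share one generator, so at most N files (and therefore
    // at most N pool connections) are in flight at any time.
    function processAll(concurrency) {
        const files = getFilesInDirectory(__dirname, true);

        function worker() {
            const next = files.next();
            if (next.done) return Promise.resolve();
            // Pull the next file only once this one has finished
            return processFile(next.value).then(handleResults).then(worker);
        }

        const workers = [];
        for (let i = 0; i < concurrency; i++) {
            workers.push(worker());
        }
        return Promise.all(workers);
    }

    processAll(20).then(() => console.log('done'));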