perf: process large queries in chunks
Signed-off-by: Miroslav Bajtoš <mbajtoss@gmail.com>
commit d8ca3827ab
parent 95178d10a6
lib/dao.js | 25 ++++++++++++++++++++++++-
diff --git a/lib/dao.js b/lib/dao.js
--- a/lib/dao.js
+++ b/lib/dao.js
@@ -1667,7 +1667,30 @@ DataAccessObject.find = function find(query, options, cb) {
         cb(err, results);
       }
 
-      processQueryResults(data, done);
+      // FIXME(bajtos) read this value from Model settings
+      const CHUNK_SIZE = 500;
+      if (data.length <= CHUNK_SIZE) {
+        processQueryResults(data, done);
+      } else {
+        const chunks = [];
+        while (data.length)
+          chunks.push(data.splice(0, CHUNK_SIZE));
+        async.mapSeries(chunks, function processChunkOfQueryResults(data, next) {
+          let sync = true;
+          processQueryResults(data, (err, result) => {
+            if (sync) {
+              process.nextTick(next, err, result);
+            } else {
+              next(err, result);
+            }
+          });
+          sync = false;
+        }, (err, resultChunks) => {
+          if (err) return done(err);
+          const result = [].concat(...resultChunks);
+          done(null, result);
+        });
+      }
     };
 
     function processQueryResults(data, cb) {
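Note (not part of the commit): below is a minimal, standalone sketch of the chunking and deferral pattern introduced above. It assumes the `async` package from npm and uses a hypothetical processChunk() helper in place of processQueryResults(). The point of the `sync` flag is that when the worker calls back synchronously, deferring `next` with process.nextTick keeps async.mapSeries from growing the call stack across many chunks and yields to the event loop between them (the same concern the library's async.ensureAsync() helper addresses).

'use strict';
// Requires the `async` package from npm (the same dependency the patch relies on).
const async = require('async');

// Hypothetical stand-in for processQueryResults(): it calls back
// synchronously, which is the case the `sync` flag guards against.
function processChunk(chunk, cb) {
  cb(null, chunk.map(x => x * 2));
}

const CHUNK_SIZE = 500;
const data = [];
for (let i = 0; i < 10000; i++) data.push(i);

// Split the result set into chunks of at most CHUNK_SIZE items.
const chunks = [];
while (data.length)
  chunks.push(data.splice(0, CHUNK_SIZE));

async.mapSeries(chunks, function(chunk, next) {
  let sync = true;
  processChunk(chunk, (err, result) => {
    if (sync) {
      // The callback fired before processChunk() returned: defer `next`
      // to the next tick so the call stack does not grow across chunks.
      process.nextTick(next, err, result);
    } else {
      next(err, result);
    }
  });
  sync = false;
}, (err, resultChunks) => {
  if (err) throw err;
  // Flatten the per-chunk results back into a single array.
  const result = [].concat(...resultChunks);
  console.log('processed %d items', result.length);
});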