perf: process large queries in chunks

Signed-off-by: Miroslav Bajtoš <mbajtoss@gmail.com>
Miroslav Bajtoš 2020-01-10 11:08:35 +01:00
parent 95178d10a6
commit d8ca3827ab
1 changed file with 24 additions and 1 deletion

@@ -1667,7 +1667,30 @@ DataAccessObject.find = function find(query, options, cb) {
       cb(err, results);
     }
-    processQueryResults(data, done);
+    // FIXME(bajtos) read this value from Model settings
+    const CHUNK_SIZE = 500;
+    if (data.length <= CHUNK_SIZE) {
+      processQueryResults(data, done);
+    } else {
+      const chunks = [];
+      while (data.length)
+        chunks.push(data.splice(0, CHUNK_SIZE));
+      async.mapSeries(chunks, function processChunkOfQueryResults(data, next) {
+        let sync = true;
+        processQueryResults(data, (err, result) => {
+          if (sync) {
+            process.nextTick(next, err, result);
+          } else {
+            next(err, result);
+          }
+        });
+        sync = false;
+      }, (err, resultChunks) => {
+        if (err) return done(err);
+        const result = [].concat(...resultChunks);
+        done(null, result);
+      });
+    }
   };
   function processQueryResults(data, cb) {
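
For context, below is a minimal standalone sketch of the chunking pattern this commit introduces, assuming the `async` package the patched code already depends on. The names `processInChunks`, `processChunk`, and the sample worker are hypothetical, introduced only for illustration; this sketch is not part of the commit itself.

'use strict';
const async = require('async');

// Illustrative constant; the FIXME in the diff notes this should
// eventually come from Model settings.
const CHUNK_SIZE = 500;

// Hypothetical helper demonstrating the pattern: split a large array
// into chunks and process them one at a time through async.mapSeries.
function processInChunks(items, processChunk, done) {
  // Small inputs keep the original single-pass behavior.
  if (items.length <= CHUNK_SIZE) {
    return processChunk(items, done);
  }

  // splice() mutates `items`, so the loop consumes the array and terminates.
  const chunks = [];
  while (items.length)
    chunks.push(items.splice(0, CHUNK_SIZE));

  async.mapSeries(chunks, (chunk, next) => {
    // If processChunk calls back synchronously, defer `next` to the next
    // tick; otherwise a long run of synchronous callbacks would grow the
    // stack and block the event loop for the whole query.
    let sync = true;
    processChunk(chunk, (err, result) => {
      if (sync) {
        process.nextTick(next, err, result);
      } else {
        next(err, result);
      }
    });
    sync = false;
  }, (err, resultChunks) => {
    if (err) return done(err);
    // Flatten the per-chunk results back into a single array.
    done(null, [].concat(...resultChunks));
  });
}

// Example usage with a worker that calls back synchronously
// (note that processInChunks consumes the input array via splice):
const numbers = Array.from({length: 1200}, (_, i) => i);
processInChunks(numbers, (chunk, cb) => cb(null, chunk.map(n => n * 2)),
  (err, result) => {
    if (err) throw err;
    console.log(result.length); // 1200
  });

Deferring via process.nextTick only when the callback fired synchronously gives each chunk a fresh stack frame and yields to the event loop between chunks, while avoiding an extra tick of latency for workers that are genuinely asynchronous.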