ImportEtherpad: Batch database writes

Richard Hansen 2022-04-19 16:47:54 -04:00
parent c1652fd695
commit 44fd70491d
2 changed files with 7 additions and 4 deletions


@@ -39,6 +39,8 @@
   timeouts with large pads.
 * Exporting a large pad to `.etherpad` format should be faster thanks to bulk
   database record fetches.
+* When importing an `.etherpad` file, records are now saved to the database in
+  batches to avoid database timeouts with large pads.
 #### For plugin authors
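The new changelog entry above describes grouping the imported pad's database records into fixed-size batches and waiting for each batch to finish before starting the next, rather than firing a write for every record at once. A minimal sketch of that idea, assuming a `db.set(key, value)` call that returns a promise as in Etherpad's database layer (the helper name and batch size here are illustrative, not the project's code):

```javascript
// Illustrative only: write an iterable of [key, value] records in batches so
// that at most `batchSize` writes are in flight at any moment.
const writeRecordsInBatches = async (db, records, batchSize = 100) => {
  let batch = [];
  for (const [key, value] of records) {
    batch.push(db.set(key, value)); // start this record's write
    if (batch.length === batchSize) {
      await Promise.all(batch); // let the current batch finish before starting more
      batch = [];
    }
  }
  if (batch.length > 0) await Promise.all(batch); // flush the final partial batch
};
```

Capping the number of outstanding writes is what keeps a very large pad from overwhelming the database and hitting timeouts during import.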


@@ -105,8 +105,9 @@ exports.setPadRaw = async (padId, r, authorId = '') => {
     await padDb.close();
   }
-  await Promise.all([
-    ...[...data].map(([k, v]) => q.pushAsync(() => db.set(k, v))),
-    ...[...existingAuthors].map((a) => q.pushAsync(() => authorManager.addPad(a, padId))),
-  ]);
+  const writeOps = (function* () {
+    for (const [k, v] of data) yield db.set(k, v);
+    for (const a of existingAuthors) yield authorManager.addPad(a, padId);
+  })();
+  for (const op of new Stream(writeOps).batch(100).buffer(99)) await op;
 };
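In the replacement code, `writeOps` is a generator, so each `db.set()` or `authorManager.addPad()` call is only made, and its promise only created, when the consumer pulls the next item. The `new Stream(writeOps).batch(100).buffer(99)` chain uses Etherpad's internal `Stream` helper; judging from the chain and the commit message, it pulls operations in groups of 100 and keeps a limited number of them started ahead of the one currently being awaited, so only a bounded number of writes is pending at any time instead of one promise per record as with the old `Promise.all`. A rough sketch of that consumption pattern without the `Stream` class (the helper below is illustrative, not the project's API):

```javascript
// Illustrative only: drain a (possibly lazy) iterable of promises while keeping
// at most `ahead` of them started beyond the one currently being awaited.
// Pulling from a generator like writeOps starts the corresponding write, so
// this bounds both memory use and concurrent database operations.
const drainWithLookahead = async (ops, ahead = 99) => {
  const pending = [];
  for (const op of ops) {
    pending.push(op); // starts the write when ops is a lazy generator
    if (pending.length > ahead) await pending.shift(); // wait for the oldest one
  }
  while (pending.length > 0) await pending.shift(); // drain whatever remains
};

// Hypothetical usage in place of the Stream-based loop:
// await drainWithLookahead(writeOps, 99);
```

The writes still run concurrently within each window, so the import stays fast, while the cap avoids the flood of simultaneous writes that caused database timeouts on large pads.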