// migration.js
  1. /* eslint-disable no-undef, no-var, vars-on-top, no-restricted-globals, regex/invalid, import/extensions */
  2. // ignore lint error because this file is js as mongoshell
  3. var processor = require('./processor.js');
  4. var pagesCollection = db.getCollection('pages');
  5. var revisionsCollection = db.getCollection('revisions');
  6. var operations = [];
  7. var batchSize = process.env.BATCH_SIZE ?? 100; // default 100 revisions in 1 bulkwrite
  8. var batchSizeInterval = process.env.BATCH_INTERVAL ?? 3000; // default 3 sec
  9. // ===========================================
  10. // replace method with processors
  11. // ===========================================
  12. function replaceLatestRevisions(body, processors) {
  13. var replacedBody = body;
  14. processors.forEach((processor) => {
  15. replacedBody = processor(replacedBody);
  16. });
  17. return replacedBody;
  18. }
  19. pagesCollection.find({}).forEach((doc) => {
  20. if (doc.revision) {
  21. var revision = revisionsCollection.findOne({ _id: doc.revision });
  22. var replacedBody = replaceLatestRevisions(revision.body, [...processor]);
  23. var operation = {
  24. updateOne: {
  25. filter: { _id: revision._id },
  26. update: {
  27. $set: { body: replacedBody },
  28. },
  29. },
  30. };
  31. operations.push(operation);
  32. // bulkWrite per 100 revisions
  33. if (operations.length > (batchSize - 1)) {
  34. revisionsCollection.bulkWrite(operations);
  35. // sleep time can be set from env var
  36. sleep(batchSizeInterval);
  37. operations = [];
  38. }
  39. }
  40. });
  41. revisionsCollection.bulkWrite(operations);
  42. print('migration complete!');