Taichi Masuyama 4 лет назад
Родитель
Commit
8517e6db7a

+ 1 - 1
packages/app/src/server/models/page.js

@@ -38,7 +38,7 @@ const STATUS_DELETED = 'deleted';
 
 const pageSchema = new mongoose.Schema({
   parent: {
-    type: ObjectId, ref: 'Page', default: null,
+    type: ObjectId, ref: 'Page', index: true, default: null,
   },
   isEmpty: { type: Boolean, default: false },
   path: {

+ 10 - 0
packages/app/src/server/routes/apiv3/pages.js

@@ -684,6 +684,16 @@ module.exports = (crowi) => {
   // TODO: handle 'notNow' and 'upgrade' to either set config to false or start/resume migration 80202
   // TODO: use socket conn to show progress
   router.post('/v5-schema-migration', /* accessTokenParser, loginRequired, adminRequired, csrf, */ async(req, res) => {
+    const Page = crowi.model('Page');
+    await Page.insertMany([
+      { path: '/ccccccc' },
+      { path: '/ccccccc/B1' },
+      { path: '/ccccccc/B2' },
+      { path: '/ccccccc/B1/C1' },
+      { path: '/ccccccc/B2/C1' },
+      { path: '/ccccccc/B2/C2/D1' },
+      { path: '/ccccccc/B3/f/f/f/f' },
+    ]);
     try {
       const Page = crowi.model('Page');
       // TODO: not await but should be dealed as a job

+ 13 - 18
packages/app/src/server/service/page.js

@@ -740,10 +740,13 @@ class PageService {
 
   async v5RecursiveMigration(grant, rootPath = null) {
     const BATCH_SIZE = 100;
+    const PAGES_LIMIT = 3000;
     const Page = this.crowi.model('Page');
     const { PageQueryBuilder } = Page;
 
-    const randomPagesStream = await Page
+    const total = await Page.countDocuments({ grant, parent: null });
+
+    let baseAggregation = Page
       .aggregate([
         {
           $match: {
@@ -755,23 +758,19 @@ class PageService {
           $project: { // minimize data to fetch
             _id: 1,
             path: 1,
-            pathLength: { $strLenCP: '$path' }, // calculate path length
           },
         },
-        {
-          $sort: { pathLength: -1 }, // get less same parent path
-        },
-      ])
-      .cursor({ batchSize: BATCH_SIZE }) // get stream
-      .exec();
+      ]);
 
-    // use batch stream
-    const batchStream = createBatchStream(BATCH_SIZE);
+    // limit pages to get
+    if (total > PAGES_LIMIT) {
+      baseAggregation = baseAggregation.limit(Math.floor(total * 0.3));
+    }
 
+    const randomPagesStream = await baseAggregation.cursor({ batchSize: BATCH_SIZE }).exec();
 
-    // determines the frequency of skipping a chunk (when N, chunks will be skipped when count is N * count)
-    const DROPOUT_MULTIPLIER = 2;
-    let count = 0;
+    // use batch stream
+    const batchStream = createBatchStream(BATCH_SIZE);
 
     let countPages = 0;
 
@@ -779,10 +778,6 @@ class PageService {
     const migratePagesStream = new Writable({
       objectMode: true,
       async write(pages, encoding, callback) {
-        // dropout
-        count++;
-        if (count % DROPOUT_MULTIPLIER !== 0) return callback();
-
         // make list to create empty pages
         const parentPathsSet = new Set(pages.map(page => pathlib.dirname(page.path)));
         const parentPaths = Array.from(parentPathsSet);
@@ -847,7 +842,7 @@ class PageService {
       .pipe(migratePagesStream);
 
     await streamToPromise(migratePagesStream);
-    if (await Page.exists({ grant, parent: null })) {
+    if (await Page.exists({ grant, parent: null, path: { $ne: '/' } })) {
       await this.v5RecursiveMigration(grant, rootPath);
     }
   }