Taichi Masuyama 4 года назад
Родитель
Commit
f6c155864c

+ 16 - 8
packages/app/src/server/service/page-grant.ts

@@ -233,7 +233,7 @@ class PageGrantService {
       .addConditionToSortPagesByDescPath()
       .query
       .exec();
-    const testAncestor = ancestors[0];
+    const testAncestor = ancestors[0]; // TODO: consider when duplicate testAncestors exist
     if (testAncestor == null) {
       throw Error('testAncestor must exist');
     }
@@ -320,7 +320,9 @@ class PageGrantService {
 
   /**
    * About the rule of validation, see: https://dev.growi.org/61b2cdabaa330ce7d8152844
-   * Only v5 schema pages will be used to compare.
+   * Only v5 schema pages will be used to compare by default (Set includeNotMigratedPages to true to include v4 schema pages as well).
+   * When comparing, it will use path regex to collect pages instead of using parent attribute of the Page model. This is reasonable since
+   * using the path attribute is safer than using the parent attribute in this case. 2022.02.13 -- Taichi Masuyama
    * @returns Promise<boolean>
    */
   async isGrantNormalized(
@@ -344,7 +346,13 @@ class PageGrantService {
     return this.processValidation(comparableTarget, comparableAncestor, comparableDescendants);
   }
 
-  async separateNormalizedAndNonNormalizedPages(pageIds: ObjectIdLike[]): Promise<[(PageDocument & { _id: any })[], (PageDocument & { _id: any })[]]> {
+  /**
   * Separate normalizable pages from NOT normalizable pages using the PageGrantService.prototype.isGrantNormalized method.
   * normalizable pages = pages on which normalizeParentRecursively can be run (their grant & userGroup rules are correct)
+   * @param pageIds pageIds to be tested
+   * @returns a tuple with the first element of normalizable pages and the second element of NOT normalizable pages
+   */
+  async separateNormalizableAndNotNormalizablePages(pageIds: ObjectIdLike[]): Promise<[(PageDocument & { _id: any })[], (PageDocument & { _id: any })[]]> {
     if (pageIds.length > LIMIT_FOR_MULTIPLE_PAGE_OP) {
       throw Error(`The maximum number of pageIds allowed is ${LIMIT_FOR_MULTIPLE_PAGE_OP}.`);
     }
@@ -354,8 +362,8 @@ class PageGrantService {
     const shouldCheckDescendants = true;
     const shouldIncludeNotMigratedPages = true;
 
-    const normalizedPages: (PageDocument & { _id: any })[] = [];
-    const nonNormalizedPages: (PageDocument & { _id: any })[] = []; // can be used to tell user which page failed to migrate
+    const normalizable: (PageDocument & { _id: any })[] = [];
+    const nonNormalizable: (PageDocument & { _id: any })[] = []; // can be used to tell user which page failed to migrate
 
     const builder = new PageQueryBuilder(Page.find());
     builder.addConditionToListByPageIdsArray(pageIds);
@@ -369,14 +377,14 @@ class PageGrantService {
 
       const isNormalized = await this.isGrantNormalized(path, grant, grantedUserIds, grantedGroupId, shouldCheckDescendants, shouldIncludeNotMigratedPages);
       if (isNormalized) {
-        normalizedPages.push(page);
+        normalizable.push(page);
       }
       else {
-        nonNormalizedPages.push(page);
+        nonNormalizable.push(page);
       }
     }
 
-    return [normalizedPages, nonNormalizedPages];
+    return [normalizable, nonNormalizable];
   }
 
 }

+ 23 - 21
packages/app/src/server/service/page.ts

@@ -1890,46 +1890,43 @@ class PageService {
       throw Error(`The maximum number of pageIds allowed is ${LIMIT_FOR_MULTIPLE_PAGE_OP}.`);
     }
 
-    let normalizedPages;
-    let nonNormalizedPages;
+    let normalizablePages;
+    let nonNormalizablePages;
     try {
-      [normalizedPages, nonNormalizedPages] = await this.crowi.pageGrantService.separateNormalizedAndNonNormalizedPages(pageIds);
+      [normalizablePages, nonNormalizablePages] = await this.crowi.pageGrantService.separateNormalizableAndNotNormalizablePages(pageIds);
     }
     catch (err) {
       throw err;
     }
 
-    if (normalizedPages.length === 0) {
+    if (normalizablePages.length === 0) {
       // socket.emit('normalizeParentRecursivelyByPageIds', { error: err.message }); TODO: use socket to tell user
       return;
     }
 
-    if (nonNormalizedPages.length !== 0) {
-      // TODO: iterate nonNormalizedPages and send socket error to client so that the user can know which path failed to migrate
+    if (nonNormalizablePages.length !== 0) {
+      // TODO: iterate nonNormalizablePages and send socket error to client so that the user can know which path failed to migrate
       // socket.emit('normalizeParentRecursivelyByPageIds', { error: err.message }); TODO: use socket to tell user
     }
 
-    // prepare no duplicated area paths
-    let pathsToNormalize = normalizedPages.map(p => p.path);
-    pathsToNormalize = omitDuplicateAreaPathFromPaths(pathsToNormalize);
+    const pagesToNormalize = omitDuplicateAreaPageFromPages(normalizablePages);
+    const pageIdsToNormalize = pagesToNormalize.map(p => p._id);
+    const pathsToNormalize = Array.from(new Set(pagesToNormalize.map(p => p.path)));
 
     // TODO: insertMany PageOperationBlock
 
-    const pageIdsToUpdateDescendantCount = nonNormalizedPages
-      .map((p): ObjectIdLike | undefined => (pathsToNormalize.includes(p.path) ? p._id : null))
-      .filter(id => id != null);
-
     // for updating descendantCount
-    const pageIdToExDescendantCount = new Map<ObjectIdLike, number>();
+    const pageIdToExDescendantCountMap = new Map<ObjectIdLike, number>();
 
     // MAIN PROCESS migrate recursively
+    const regexps = pathsToNormalize.map(p => new RegExp(`^${escapeStringRegexp(p)}`, 'i'));
     try {
-      for await (const path of pathsToNormalize) {
-        await this.normalizeParentRecursively(null, [new RegExp(`^${escapeStringRegexp(path)}`, 'i')]);
-      }
-      const pagesBeforeUpdatingDescendantCount = await Page.findByIdsAndViewer(pageIds, user);
+      await this.normalizeParentRecursively(null, regexps);
+
+      // find pages to save descendantCount of normalized pages (only pages in pageIds parameter of this method)
+      const pagesBeforeUpdatingDescendantCount = await Page.findByIdsAndViewer(pageIdsToNormalize, user);
       pagesBeforeUpdatingDescendantCount.forEach((p) => {
-        pageIdToExDescendantCount.set(p._id.toString(), p.descendantCount);
+        pageIdToExDescendantCountMap.set(p._id.toString(), p.descendantCount);
       });
     }
     catch (err) {
@@ -1941,13 +1938,18 @@ class PageService {
 
     // POST MAIN PROCESS update descendantCount
     try {
+      // update descendantCount of self and descendant pages first
       for await (const path of pathsToNormalize) {
         await this.updateDescendantCountOfSelfAndDescendants(path);
       }
 
-      const pagesAfterUpdatingDescendantCount = await Page.findByIdsAndViewer(pageIdsToUpdateDescendantCount, user);
+      // find pages again to get updated descendantCount
+      // then calculate inc
+      const pagesAfterUpdatingDescendantCount = await Page.findByIdsAndViewer(pageIdsToNormalize, user);
       for await (const page of pagesAfterUpdatingDescendantCount) {
-        const inc = (page.descendantCount) - (pageIdToExDescendantCount.get(page._id.toString()) || 0);
+        const exDescendantCount = pageIdToExDescendantCountMap.get(page._id.toString()) || 0;
+        const newDescendantCount = page.descendantCount;
+        const inc = newDescendantCount - exDescendantCount;
         await this.updateDescendantCountOfAncestors(page._id, inc, false);
       }
     }

+ 2 - 1
packages/core/src/utils/page-path-utils.ts

@@ -179,12 +179,13 @@ export const omitDuplicateAreaPathFromPaths = (paths: string[]): string[] => {
 
 /**
  * return pages with path without duplicate area of regexp /^${path}\/.+/i
+ * if two pages' paths are identical, it will NOT omit either of them, since the regex requires a deeper path (`/.+`) to match
 * @param pages pages to be tested
 * @returns pages with duplicate-area pages omitted
  */
 export const omitDuplicateAreaPageFromPages = (pages: any[]): any[] => {
   return pages.filter((page) => {
-    const isDuplicate = pages.filter(p => (new RegExp(`^${p.path}\\/.+`, 'i')).test(page.path)).length > 0;
+    const isDuplicate = pages.some(p => (new RegExp(`^${p.path}\\/.+`, 'i')).test(page.path));
 
     return !isDuplicate;
   });