|
|
@@ -1,3 +1,4 @@
|
|
|
+import { createHash } from 'crypto';
|
|
|
import fs from 'fs';
|
|
|
import path from 'path';
|
|
|
import type { Readable } from 'stream';
|
|
|
@@ -128,6 +129,7 @@ class PageBulkExportService {
|
|
|
if (duplicatePageBulkExportJobInProgress != null) {
|
|
|
throw new DuplicateBulkExportJobError();
|
|
|
}
|
|
|
+
|
|
|
const pageBulkExportJob: PageBulkExportJobDocument & HasObjectId = await PageBulkExportJob.create({
|
|
|
user: currentUser, page: basePage, format, status: PageBulkExportJobStatus.initializing,
|
|
|
});
|
|
|
@@ -147,7 +149,22 @@ class PageBulkExportService {
|
|
|
|
|
|
if (pageBulkExportJob.status === PageBulkExportJobStatus.initializing) {
|
|
|
await this.createPageSnapshots(user, pageBulkExportJob);
|
|
|
- pageBulkExportJob.status = PageBulkExportJobStatus.exporting;
|
|
|
+
|
|
|
+ const duplicateExportJob = await PageBulkExportJob.findOne({
|
|
|
+ user: pageBulkExportJob.user,
|
|
|
+ page: pageBulkExportJob.page,
|
|
|
+ format: pageBulkExportJob.format,
|
|
|
+ status: PageBulkExportJobStatus.completed,
|
|
|
+ revisionListHash: pageBulkExportJob.revisionListHash,
|
|
|
+ });
|
|
|
+ if (duplicateExportJob != null) {
|
|
|
+ // if a completed upload with the exact same contents exists, reuse that upload's attachment
|
|
|
+ pageBulkExportJob.attachment = duplicateExportJob.attachment;
|
|
|
+ pageBulkExportJob.status = PageBulkExportJobStatus.completed;
|
|
|
+ }
|
|
|
+ else {
|
|
|
+ pageBulkExportJob.status = PageBulkExportJobStatus.exporting;
|
|
|
+ }
|
|
|
await pageBulkExportJob.save();
|
|
|
}
|
|
|
if (pageBulkExportJob.status === PageBulkExportJobStatus.exporting) {
|
|
|
@@ -197,7 +214,8 @@ class PageBulkExportService {
|
|
|
}
|
|
|
|
|
|
/**
|
|
|
- * Create a snapshot for each page that is to be exported in the pageBulkExportJob
|
|
|
+ * Create a snapshot for each page that is to be exported in the pageBulkExportJob.
|
|
|
+ * Also calculate revisionListHash and save it to the pageBulkExportJob.
|
|
|
*/
|
|
|
private async createPageSnapshots(user, pageBulkExportJob: PageBulkExportJobDocument): Promise<void> {
|
|
|
// if the process of creating snapshots was interrupted, delete the snapshots and create from the start
|
|
|
@@ -208,6 +226,8 @@ class PageBulkExportService {
|
|
|
throw new Error('Base page not found');
|
|
|
}
|
|
|
|
|
|
+ const revisionListHash = createHash('sha256');
|
|
|
+
|
|
|
// create a Readable for pages to be exported
|
|
|
const { PageQueryBuilder } = this.pageModel;
|
|
|
const builder = await new PageQueryBuilder(this.pageModel.find())
|
|
|
@@ -223,6 +243,9 @@ class PageBulkExportService {
|
|
|
objectMode: true,
|
|
|
write: async(page: PageDocument, encoding, callback) => {
|
|
|
try {
|
|
|
+ if (page.revision != null) {
|
|
|
+ revisionListHash.update(getIdForRef(page.revision).toString());
|
|
|
+ }
|
|
|
await PageBulkExportPageSnapshot.create({
|
|
|
pageBulkExportJob,
|
|
|
path: page.path,
|
|
|
@@ -240,6 +263,9 @@ class PageBulkExportService {
|
|
|
this.pageBulkExportJobStreamManager.addJobStream(pageBulkExportJob._id, pagesReadable);
|
|
|
|
|
|
await pipelinePromise(pagesReadable, pageSnapshotsWritable);
|
|
|
+
|
|
|
+ pageBulkExportJob.revisionListHash = revisionListHash.digest('hex');
|
|
|
+ await pageBulkExportJob.save();
|
|
|
}
|
|
|
|
|
|
/**
|
|
|
@@ -302,7 +328,8 @@ class PageBulkExportService {
|
|
|
const pageArchiver = this.setUpPageArchiver();
|
|
|
const bufferToPartSizeTransform = getBufferToFixedSizeTransform(this.maxPartSize);
|
|
|
|
|
|
- const originalName = `${pageBulkExportJob._id}.${this.compressExtension}`;
|
|
|
+ if (pageBulkExportJob.revisionListHash == null) throw new Error('revisionListHash is not set');
|
|
|
+ const originalName = `${pageBulkExportJob.revisionListHash}.${this.compressExtension}`;
|
|
|
const attachment = Attachment.createWithoutSave(null, user, originalName, this.compressExtension, 0, AttachmentType.PAGE_BULK_EXPORT);
|
|
|
const uploadKey = `${FilePathOnStoragePrefix.pageBulkExport}/${attachment.fileName}`;
|
|
|
|