  1. import fs from 'fs';
  2. import path from 'path';
  3. import type { EventEmitter } from 'stream';
  4. import { Writable, Transform, pipeline } from 'stream';
  5. import { finished, pipeline as pipelinePromise } from 'stream/promises';
  6. import JSONStream from 'JSONStream';
  7. import gc from 'expose-gc/function';
  8. import type {
  9. BulkWriteResult, MongoBulkWriteError, UnorderedBulkOperation, WriteError,
  10. } from 'mongodb';
  11. import type { Document } from 'mongoose';
  12. import mongoose from 'mongoose';
  13. import unzipStream from 'unzip-stream';
  14. import { ImportMode } from '~/models/admin/import-mode';
  15. import type Crowi from '~/server/crowi';
  16. import { setupIndependentModels } from '~/server/crowi/setup-models';
  17. import type CollectionProgress from '~/server/models/vo/collection-progress';
  18. import { getGrowiVersion } from '~/utils/growi-version';
  19. import loggerFactory from '~/utils/logger';
  20. import CollectionProgressingStatus from '../../models/vo/collection-progressing-status';
  21. import { createBatchStream } from '../../util/batch-stream';
  22. import { configManager } from '../config-manager';
  23. import type { ConvertMap } from './construct-convert-map';
  24. import { constructConvertMap } from './construct-convert-map';
  25. import { getModelFromCollectionName } from './get-model-from-collection-name';
  26. import type { ImportSettings, OverwriteParams } from './import-settings';
  27. import { keepOriginal } from './overwrite-function';
// module-scoped logger for this service
const logger = loggerFactory('growi:services:ImportService'); // eslint-disable-line no-unused-vars

// number of documents accumulated per batch before executing a bulk write
const BULK_IMPORT_SIZE = 100;
  30. class ImportingCollectionError extends Error {
  31. collectionProgress: CollectionProgress;
  32. constructor(collectionProgress, error) {
  33. super(error);
  34. this.collectionProgress = collectionProgress;
  35. }
  36. }
  37. export class ImportService {
  38. private crowi: Crowi;
  39. private growiBridgeService: any;
  40. private adminEvent: EventEmitter;
  41. private currentProgressingStatus: CollectionProgressingStatus | null;
  42. private convertMap: ConvertMap;
  43. constructor(crowi: Crowi) {
  44. this.crowi = crowi;
  45. this.growiBridgeService = crowi.growiBridgeService;
  46. this.adminEvent = crowi.event('admin');
  47. this.currentProgressingStatus = null;
  48. }
  49. get baseDir(): string {
  50. return path.join(this.crowi.tmpDir, 'imports');
  51. }
  52. getFile(fileName: string): string {
  53. return this.growiBridgeService.getFile(fileName, this.baseDir);
  54. }
  55. /**
  56. * parse all zip files in downloads dir
  57. *
  58. * @memberOf ExportService
  59. * @return {object} info for zip files and whether currentProgressingStatus exists
  60. */
  61. async getStatus() {
  62. const zipFiles = fs.readdirSync(this.baseDir).filter(file => path.extname(file) === '.zip');
  63. // process serially so as not to waste memory
  64. const zipFileStats: any[] = [];
  65. const parseZipFilePromises: Promise<any>[] = zipFiles.map((file) => {
  66. const zipFile = this.getFile(file);
  67. return this.growiBridgeService.parseZipFile(zipFile);
  68. });
  69. for await (const stat of parseZipFilePromises) {
  70. zipFileStats.push(stat);
  71. }
  72. // filter null object (broken zip)
  73. const filtered = zipFileStats
  74. .filter(zipFileStat => zipFileStat != null);
  75. // sort with ctime("Change Time" - Time when file status was last changed (inode data modification).)
  76. filtered.sort((a, b) => { return a.fileStat.ctime - b.fileStat.ctime });
  77. const zipFileStat = filtered.pop();
  78. let isTheSameVersion = false;
  79. if (zipFileStat != null) {
  80. try {
  81. this.validate(zipFileStat.meta);
  82. isTheSameVersion = true;
  83. }
  84. catch (err) {
  85. isTheSameVersion = false;
  86. logger.error('the versions are not met', err);
  87. }
  88. }
  89. return {
  90. isTheSameVersion,
  91. zipFileStat,
  92. isImporting: this.currentProgressingStatus != null,
  93. progressList: this.currentProgressingStatus?.progressList ?? null,
  94. };
  95. }
  96. async preImport() {
  97. await setupIndependentModels();
  98. // initialize convertMap
  99. this.convertMap = constructConvertMap();
  100. }
  101. /**
  102. * import collections from json
  103. * @param collections MongoDB collection name
  104. * @param importSettingsMap
  105. */
  106. async import(collections: string[], importSettingsMap: { [collectionName: string]: ImportSettings }): Promise<void> {
  107. await this.preImport();
  108. // init status object
  109. this.currentProgressingStatus = new CollectionProgressingStatus(collections);
  110. // process serially so as not to waste memory
  111. const promises = collections.map((collectionName) => {
  112. const importSettings = importSettingsMap[collectionName];
  113. return this.importCollection(collectionName, importSettings);
  114. });
  115. for await (const promise of promises) {
  116. try {
  117. await promise;
  118. }
  119. // catch ImportingCollectionError
  120. catch (err) {
  121. const { collectionProgress } = err;
  122. logger.error(`failed to import to ${collectionProgress.collectionName}`, err);
  123. this.emitProgressEvent(collectionProgress, { message: err.message });
  124. }
  125. }
  126. this.currentProgressingStatus = null;
  127. this.emitTerminateEvent();
  128. await configManager.loadConfigs();
  129. const currentIsV5Compatible = configManager.getConfig('app:isV5Compatible');
  130. const isImportPagesCollection = collections.includes('pages');
  131. const shouldNormalizePages = currentIsV5Compatible && isImportPagesCollection;
  132. if (shouldNormalizePages) await this.crowi.pageService.normalizeAllPublicPages();
  133. }
  134. /**
  135. * import a collection from json
  136. *
  137. * @memberOf ImportService
  138. */
  139. protected async importCollection(collectionName: string, importSettings: ImportSettings): Promise<void> {
  140. if (this.currentProgressingStatus == null) {
  141. throw new Error('Something went wrong: currentProgressingStatus is not initialized');
  142. }
  143. // prepare functions invoked from custom streams
  144. const convertDocuments = this.convertDocuments.bind(this);
  145. const bulkOperate = this.bulkOperate.bind(this);
  146. const execUnorderedBulkOpSafely = this.execUnorderedBulkOpSafely.bind(this);
  147. const emitProgressEvent = this.emitProgressEvent.bind(this);
  148. const collection = mongoose.connection.collection(collectionName);
  149. const { mode, jsonFileName, overwriteParams } = importSettings;
  150. const collectionProgress = this.currentProgressingStatus.progressMap[collectionName];
  151. try {
  152. const jsonFile = this.getFile(jsonFileName);
  153. // validate options
  154. this.validateImportSettings(collectionName, importSettings);
  155. // flush
  156. if (mode === ImportMode.flushAndInsert) {
  157. await collection.deleteMany({});
  158. }
  159. // stream 1
  160. const readStream = fs.createReadStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
  161. // stream 2
  162. const jsonStream = JSONStream.parse('*');
  163. // stream 3
  164. const convertStream = new Transform({
  165. objectMode: true,
  166. transform(doc, encoding, callback) {
  167. const converted = convertDocuments(collectionName, doc, overwriteParams);
  168. this.push(converted);
  169. callback();
  170. },
  171. });
  172. // stream 4
  173. const batchStream = createBatchStream(BULK_IMPORT_SIZE);
  174. // stream 5
  175. const writeStream = new Writable({
  176. objectMode: true,
  177. async write(batch, encoding, callback) {
  178. const unorderedBulkOp = collection.initializeUnorderedBulkOp();
  179. // documents are not persisted until unorderedBulkOp.execute()
  180. batch.forEach((document) => {
  181. bulkOperate(unorderedBulkOp, collectionName, document, importSettings);
  182. });
  183. // exec
  184. const { result, errors } = await execUnorderedBulkOpSafely(unorderedBulkOp);
  185. const { insertedCount, modifiedCount } = result;
  186. const errorCount = errors?.length ?? 0;
  187. logger.debug(`Importing ${collectionName}. Inserted: ${insertedCount}. Modified: ${modifiedCount}. Failed: ${errorCount}.`);
  188. const increment = insertedCount + modifiedCount + errorCount;
  189. collectionProgress.currentCount += increment;
  190. collectionProgress.totalCount += increment;
  191. collectionProgress.insertedCount += insertedCount;
  192. collectionProgress.modifiedCount += modifiedCount;
  193. emitProgressEvent(collectionProgress, errors);
  194. try {
  195. // First aid to prevent unexplained memory leaks
  196. logger.info('global.gc() invoked.');
  197. gc();
  198. }
  199. catch (err) {
  200. logger.error('fail garbage collection: ', err);
  201. }
  202. callback();
  203. },
  204. final(callback) {
  205. logger.info(`Importing ${collectionName} has completed.`);
  206. callback();
  207. },
  208. });
  209. await pipelinePromise(readStream, jsonStream, convertStream, batchStream, writeStream);
  210. // clean up tmp directory
  211. fs.unlinkSync(jsonFile);
  212. }
  213. catch (err) {
  214. throw new ImportingCollectionError(collectionProgress, err);
  215. }
  216. }
  217. /**
  218. *
  219. * @param {string} collectionName
  220. * @param {importSettings} importSettings
  221. */
  222. validateImportSettings(collectionName, importSettings) {
  223. const { mode } = importSettings;
  224. switch (collectionName) {
  225. case 'configs':
  226. if (mode !== ImportMode.flushAndInsert) {
  227. throw new Error(`The specified mode '${mode}' is not allowed when importing to 'configs' collection.`);
  228. }
  229. break;
  230. }
  231. }
  232. /**
  233. * process bulk operation
  234. * @param bulk MongoDB Bulk instance
  235. * @param collectionName collection name
  236. */
  237. bulkOperate(bulk, collectionName: string, document, importSettings: ImportSettings) {
  238. // insert
  239. if (importSettings.mode !== ImportMode.upsert) {
  240. return bulk.insert(document);
  241. }
  242. // upsert
  243. switch (collectionName) {
  244. case 'pages':
  245. return bulk.find({ path: document.path }).upsert().replaceOne(document);
  246. default:
  247. return bulk.find({ _id: document._id }).upsert().replaceOne(document);
  248. }
  249. }
  250. /**
  251. * emit progress event
  252. * @param {CollectionProgress} collectionProgress
  253. * @param {object} appendedErrors key: collection name, value: array of error object
  254. */
  255. emitProgressEvent(collectionProgress, appendedErrors) {
  256. const { collectionName } = collectionProgress;
  257. // send event (in progress in global)
  258. this.adminEvent.emit('onProgressForImport', { collectionName, collectionProgress, appendedErrors });
  259. }
  260. /**
  261. * emit terminate event
  262. */
  263. emitTerminateEvent() {
  264. this.adminEvent.emit('onTerminateForImport');
  265. }
  266. /**
  267. * extract a zip file
  268. *
  269. * @memberOf ImportService
  270. * @param {string} zipFile absolute path to zip file
  271. * @return {Array.<string>} array of absolute paths to extracted files
  272. */
  273. async unzip(zipFile) {
  274. const readStream = fs.createReadStream(zipFile);
  275. const parseStream = unzipStream.Parse();
  276. const unzipEntryStream = pipeline(readStream, parseStream, () => {});
  277. const files: string[] = [];
  278. unzipEntryStream.on('entry', (/** @type {Entry} */ entry) => {
  279. const fileName = entry.path;
  280. // https://regex101.com/r/mD4eZs/6
  281. // prevent from unexpecting attack doing unzip file (path traversal attack)
  282. // FOR EXAMPLE
  283. // ../../src/server/example.html
  284. if (fileName.match(/(\.\.\/|\.\.\\)/)) {
  285. logger.error('File path is not appropriate.', fileName);
  286. return;
  287. }
  288. if (fileName === this.growiBridgeService.getMetaFileName()) {
  289. // skip meta.json
  290. entry.autodrain();
  291. }
  292. else {
  293. const jsonFile = path.join(this.baseDir, fileName);
  294. const writeStream = fs.createWriteStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
  295. pipeline(entry, writeStream, () => {});
  296. files.push(jsonFile);
  297. }
  298. });
  299. await finished(unzipEntryStream);
  300. return files;
  301. }
  302. /**
  303. * execute unorderedBulkOp and ignore errors
  304. *
  305. * @memberOf ImportService
  306. */
  307. async execUnorderedBulkOpSafely(unorderedBulkOp: UnorderedBulkOperation): Promise<{ result: BulkWriteResult, errors?: WriteError[] }> {
  308. try {
  309. return {
  310. result: await unorderedBulkOp.execute(),
  311. };
  312. }
  313. catch (err) {
  314. const errTypeGuard = (err): err is MongoBulkWriteError => {
  315. return 'result' in err && 'writeErrors' in err;
  316. };
  317. if (errTypeGuard(err)) {
  318. return {
  319. result: err.result,
  320. errors: Array.isArray(err.writeErrors) ? err.writeErrors : [err.writeErrors],
  321. };
  322. }
  323. logger.error('Failed to execute unorderedBulkOp and the error could not handled.', err);
  324. throw new Error('Failed to execute unorderedBulkOp and the error could not handled.', err);
  325. }
  326. }
  327. /**
  328. * execute unorderedBulkOp and ignore errors
  329. *
  330. * @memberOf ImportService
  331. * @param collectionName
  332. * @param document document being imported
  333. * @returns document to be persisted
  334. */
  335. convertDocuments<D extends Document>(collectionName: string, document: D, overwriteParams: OverwriteParams): D {
  336. const Model = getModelFromCollectionName(collectionName);
  337. const schema = (Model != null) ? Model.schema : undefined;
  338. const convertMap = this.convertMap[collectionName];
  339. const _document: D = structuredClone(document);
  340. // apply keepOriginal to all of properties
  341. Object.entries(document).forEach(([propertyName, value]) => {
  342. _document[propertyName] = keepOriginal(value, { document, propertyName });
  343. });
  344. // Mongoose Model
  345. if (convertMap != null) {
  346. // assign value from documents being imported
  347. Object.entries(convertMap).forEach(([propertyName, convertedValue]) => {
  348. const value = document[propertyName];
  349. // distinguish between null and undefined
  350. if (value === undefined) {
  351. return; // next entry
  352. }
  353. const convertFunc = (typeof convertedValue === 'function') ? convertedValue : null;
  354. _document[propertyName] = (convertFunc != null) ? convertFunc(value, { document, propertyName, schema }) : convertedValue;
  355. });
  356. }
  357. // overwrite documents with custom values
  358. Object.entries(overwriteParams).forEach(([propertyName, overwriteValue]) => {
  359. const value = document[propertyName];
  360. // distinguish between null and undefined
  361. if (value !== undefined) {
  362. const overwriteFunc = (typeof overwriteValue === 'function') ? overwriteValue : null;
  363. _document[propertyName] = (overwriteFunc != null) ? overwriteFunc(value, { document: _document, propertyName, schema }) : overwriteValue;
  364. }
  365. });
  366. return _document;
  367. }
  368. /**
  369. * validate using meta.json
  370. * to pass validation, all the criteria must be met
  371. * - ${version of this GROWI} === ${version of GROWI that exported data}
  372. *
  373. * @memberOf ImportService
  374. * @param {object} meta meta data from meta.json
  375. */
  376. validate(meta) {
  377. if (meta.version !== getGrowiVersion()) {
  378. throw new Error('The version of this GROWI and the uploaded GROWI data are not the same');
  379. }
  380. // TODO: check if all migrations are completed
  381. // - export: throw err if there are pending migrations
  382. // - import: throw err if there are pending migrations
  383. }
  384. /**
  385. * Delete all uploaded files
  386. */
  387. deleteAllZipFiles() {
  388. fs.readdirSync(this.baseDir)
  389. .filter(file => path.extname(file) === '.zip')
  390. .forEach(file => fs.unlinkSync(path.join(this.baseDir, file)));
  391. }
  392. }