// import.ts
  1. import fs from 'fs';
  2. import path from 'path';
  3. import type { EventEmitter } from 'stream';
  4. import { Writable, Transform, pipeline } from 'stream';
  5. import { finished, pipeline as pipelinePromise } from 'stream/promises';
  6. import JSONStream from 'JSONStream';
  7. import gc from 'expose-gc/function';
  8. import type {
  9. BulkWriteResult, MongoBulkWriteError, UnorderedBulkOperation, WriteError,
  10. } from 'mongodb';
  11. import type { Document } from 'mongoose';
  12. import mongoose from 'mongoose';
  13. import unzipStream from 'unzip-stream';
  14. import { ImportMode } from '~/models/admin/import-mode';
  15. import type Crowi from '~/server/crowi';
  16. import { setupIndependentModels } from '~/server/crowi/setup-models';
  17. import type CollectionProgress from '~/server/models/vo/collection-progress';
  18. import loggerFactory from '~/utils/logger';
  19. import CollectionProgressingStatus from '../../models/vo/collection-progressing-status';
  20. import { createBatchStream } from '../../util/batch-stream';
  21. import { configManager } from '../config-manager';
  22. import type { ConvertMap } from './construct-convert-map';
  23. import { constructConvertMap } from './construct-convert-map';
  24. import { getModelFromCollectionName } from './get-model-from-collection-name';
  25. import type { ImportSettings, OverwriteParams } from './import-settings';
  26. import { keepOriginal } from './overwrite-function';
  27. const logger = loggerFactory('growi:services:ImportService'); // eslint-disable-line no-unused-vars
  28. const BULK_IMPORT_SIZE = 100;
  29. class ImportingCollectionError extends Error {
  30. collectionProgress: CollectionProgress;
  31. constructor(collectionProgress, error) {
  32. super(error);
  33. this.collectionProgress = collectionProgress;
  34. }
  35. }
  36. export class ImportService {
  37. private crowi: Crowi;
  38. private growiBridgeService: any;
  39. private adminEvent: EventEmitter;
  40. private currentProgressingStatus: CollectionProgressingStatus | null;
  41. private convertMap: ConvertMap;
  42. constructor(crowi: Crowi) {
  43. this.crowi = crowi;
  44. this.growiBridgeService = crowi.growiBridgeService;
  45. this.adminEvent = crowi.event('admin');
  46. this.currentProgressingStatus = null;
  47. }
  48. get baseDir(): string {
  49. return path.join(this.crowi.tmpDir, 'imports');
  50. }
  51. getFile(fileName: string): string {
  52. return this.growiBridgeService.getFile(fileName, this.baseDir);
  53. }
  54. /**
  55. * parse all zip files in downloads dir
  56. *
  57. * @memberOf ExportService
  58. * @return {object} info for zip files and whether currentProgressingStatus exists
  59. */
  60. async getStatus() {
  61. const zipFiles = fs.readdirSync(this.baseDir).filter(file => path.extname(file) === '.zip');
  62. // process serially so as not to waste memory
  63. const zipFileStats: any[] = [];
  64. const parseZipFilePromises: Promise<any>[] = zipFiles.map((file) => {
  65. const zipFile = this.getFile(file);
  66. return this.growiBridgeService.parseZipFile(zipFile);
  67. });
  68. for await (const stat of parseZipFilePromises) {
  69. zipFileStats.push(stat);
  70. }
  71. // filter null object (broken zip)
  72. const filtered = zipFileStats
  73. .filter(zipFileStat => zipFileStat != null);
  74. // sort with ctime("Change Time" - Time when file status was last changed (inode data modification).)
  75. filtered.sort((a, b) => { return a.fileStat.ctime - b.fileStat.ctime });
  76. const zipFileStat = filtered.pop();
  77. let isTheSameVersion = false;
  78. if (zipFileStat != null) {
  79. try {
  80. this.validate(zipFileStat.meta);
  81. isTheSameVersion = true;
  82. }
  83. catch (err) {
  84. isTheSameVersion = false;
  85. logger.error('the versions are not met', err);
  86. }
  87. }
  88. return {
  89. isTheSameVersion,
  90. zipFileStat,
  91. isImporting: this.currentProgressingStatus != null,
  92. progressList: this.currentProgressingStatus?.progressList ?? null,
  93. };
  94. }
  95. async preImport() {
  96. await setupIndependentModels();
  97. // initialize convertMap
  98. this.convertMap = constructConvertMap();
  99. }
  100. /**
  101. * import collections from json
  102. * @param collections MongoDB collection name
  103. * @param importSettingsMap
  104. */
  105. async import(collections: string[], importSettingsMap: { [collectionName: string]: ImportSettings }): Promise<void> {
  106. await this.preImport();
  107. // init status object
  108. this.currentProgressingStatus = new CollectionProgressingStatus(collections);
  109. // process serially so as not to waste memory
  110. const promises = collections.map((collectionName) => {
  111. const importSettings = importSettingsMap[collectionName];
  112. return this.importCollection(collectionName, importSettings);
  113. });
  114. for await (const promise of promises) {
  115. try {
  116. await promise;
  117. }
  118. // catch ImportingCollectionError
  119. catch (err) {
  120. const { collectionProgress } = err;
  121. logger.error(`failed to import to ${collectionProgress.collectionName}`, err);
  122. this.emitProgressEvent(collectionProgress, { message: err.message });
  123. }
  124. }
  125. this.currentProgressingStatus = null;
  126. this.emitTerminateEvent();
  127. await configManager.loadConfigs();
  128. const currentIsV5Compatible = configManager.getConfig('app:isV5Compatible');
  129. const isImportPagesCollection = collections.includes('pages');
  130. const shouldNormalizePages = currentIsV5Compatible && isImportPagesCollection;
  131. if (shouldNormalizePages) await this.crowi.pageService.normalizeAllPublicPages();
  132. }
  133. /**
  134. * import a collection from json
  135. *
  136. * @memberOf ImportService
  137. */
  138. protected async importCollection(collectionName: string, importSettings: ImportSettings): Promise<void> {
  139. if (this.currentProgressingStatus == null) {
  140. throw new Error('Something went wrong: currentProgressingStatus is not initialized');
  141. }
  142. // prepare functions invoked from custom streams
  143. const convertDocuments = this.convertDocuments.bind(this);
  144. const bulkOperate = this.bulkOperate.bind(this);
  145. const execUnorderedBulkOpSafely = this.execUnorderedBulkOpSafely.bind(this);
  146. const emitProgressEvent = this.emitProgressEvent.bind(this);
  147. const collection = mongoose.connection.collection(collectionName);
  148. const { mode, jsonFileName, overwriteParams } = importSettings;
  149. const collectionProgress = this.currentProgressingStatus.progressMap[collectionName];
  150. try {
  151. const jsonFile = this.getFile(jsonFileName);
  152. // validate options
  153. this.validateImportSettings(collectionName, importSettings);
  154. // flush
  155. if (mode === ImportMode.flushAndInsert) {
  156. await collection.deleteMany({});
  157. }
  158. // stream 1
  159. const readStream = fs.createReadStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
  160. // stream 2
  161. const jsonStream = JSONStream.parse('*');
  162. // stream 3
  163. const convertStream = new Transform({
  164. objectMode: true,
  165. transform(doc, encoding, callback) {
  166. const converted = convertDocuments(collectionName, doc, overwriteParams);
  167. this.push(converted);
  168. callback();
  169. },
  170. });
  171. // stream 4
  172. const batchStream = createBatchStream(BULK_IMPORT_SIZE);
  173. // stream 5
  174. const writeStream = new Writable({
  175. objectMode: true,
  176. async write(batch, encoding, callback) {
  177. const unorderedBulkOp = collection.initializeUnorderedBulkOp();
  178. // documents are not persisted until unorderedBulkOp.execute()
  179. batch.forEach((document) => {
  180. bulkOperate(unorderedBulkOp, collectionName, document, importSettings);
  181. });
  182. // exec
  183. const { result, errors } = await execUnorderedBulkOpSafely(unorderedBulkOp);
  184. const { insertedCount, modifiedCount } = result;
  185. const errorCount = errors?.length ?? 0;
  186. logger.debug(`Importing ${collectionName}. Inserted: ${insertedCount}. Modified: ${modifiedCount}. Failed: ${errorCount}.`);
  187. const increment = insertedCount + modifiedCount + errorCount;
  188. collectionProgress.currentCount += increment;
  189. collectionProgress.totalCount += increment;
  190. collectionProgress.insertedCount += insertedCount;
  191. collectionProgress.modifiedCount += modifiedCount;
  192. emitProgressEvent(collectionProgress, errors);
  193. try {
  194. // First aid to prevent unexplained memory leaks
  195. logger.info('global.gc() invoked.');
  196. gc();
  197. }
  198. catch (err) {
  199. logger.error('fail garbage collection: ', err);
  200. }
  201. callback();
  202. },
  203. final(callback) {
  204. logger.info(`Importing ${collectionName} has completed.`);
  205. callback();
  206. },
  207. });
  208. await pipelinePromise(readStream, jsonStream, convertStream, batchStream, writeStream);
  209. // clean up tmp directory
  210. fs.unlinkSync(jsonFile);
  211. }
  212. catch (err) {
  213. throw new ImportingCollectionError(collectionProgress, err);
  214. }
  215. }
  216. /**
  217. *
  218. * @param {string} collectionName
  219. * @param {importSettings} importSettings
  220. */
  221. validateImportSettings(collectionName, importSettings) {
  222. const { mode } = importSettings;
  223. switch (collectionName) {
  224. case 'configs':
  225. if (mode !== ImportMode.flushAndInsert) {
  226. throw new Error(`The specified mode '${mode}' is not allowed when importing to 'configs' collection.`);
  227. }
  228. break;
  229. }
  230. }
  231. /**
  232. * process bulk operation
  233. * @param bulk MongoDB Bulk instance
  234. * @param collectionName collection name
  235. */
  236. bulkOperate(bulk, collectionName: string, document, importSettings: ImportSettings) {
  237. // insert
  238. if (importSettings.mode !== ImportMode.upsert) {
  239. return bulk.insert(document);
  240. }
  241. // upsert
  242. switch (collectionName) {
  243. case 'pages':
  244. return bulk.find({ path: document.path }).upsert().replaceOne(document);
  245. default:
  246. return bulk.find({ _id: document._id }).upsert().replaceOne(document);
  247. }
  248. }
  249. /**
  250. * emit progress event
  251. * @param {CollectionProgress} collectionProgress
  252. * @param {object} appendedErrors key: collection name, value: array of error object
  253. */
  254. emitProgressEvent(collectionProgress, appendedErrors) {
  255. const { collectionName } = collectionProgress;
  256. // send event (in progress in global)
  257. this.adminEvent.emit('onProgressForImport', { collectionName, collectionProgress, appendedErrors });
  258. }
  259. /**
  260. * emit terminate event
  261. */
  262. emitTerminateEvent() {
  263. this.adminEvent.emit('onTerminateForImport');
  264. }
  265. /**
  266. * extract a zip file
  267. *
  268. * @memberOf ImportService
  269. * @param {string} zipFile absolute path to zip file
  270. * @return {Array.<string>} array of absolute paths to extracted files
  271. */
  272. async unzip(zipFile) {
  273. const readStream = fs.createReadStream(zipFile);
  274. const parseStream = unzipStream.Parse();
  275. const unzipEntryStream = pipeline(readStream, parseStream, () => {});
  276. const files: string[] = [];
  277. unzipEntryStream.on('entry', (/** @type {Entry} */ entry) => {
  278. const fileName = entry.path;
  279. // https://regex101.com/r/mD4eZs/6
  280. // prevent from unexpecting attack doing unzip file (path traversal attack)
  281. // FOR EXAMPLE
  282. // ../../src/server/example.html
  283. if (fileName.match(/(\.\.\/|\.\.\\)/)) {
  284. logger.error('File path is not appropriate.', fileName);
  285. return;
  286. }
  287. if (fileName === this.growiBridgeService.getMetaFileName()) {
  288. // skip meta.json
  289. entry.autodrain();
  290. }
  291. else {
  292. const jsonFile = path.join(this.baseDir, fileName);
  293. const writeStream = fs.createWriteStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
  294. pipeline(entry, writeStream, () => {});
  295. files.push(jsonFile);
  296. }
  297. });
  298. await finished(unzipEntryStream);
  299. return files;
  300. }
  301. /**
  302. * execute unorderedBulkOp and ignore errors
  303. *
  304. * @memberOf ImportService
  305. */
  306. async execUnorderedBulkOpSafely(unorderedBulkOp: UnorderedBulkOperation): Promise<{ result: BulkWriteResult, errors?: WriteError[] }> {
  307. try {
  308. return {
  309. result: await unorderedBulkOp.execute(),
  310. };
  311. }
  312. catch (err) {
  313. const errTypeGuard = (err): err is MongoBulkWriteError => {
  314. return 'result' in err && 'writeErrors' in err;
  315. };
  316. if (errTypeGuard(err)) {
  317. return {
  318. result: err.result,
  319. errors: Array.isArray(err.writeErrors) ? err.writeErrors : [err.writeErrors],
  320. };
  321. }
  322. logger.error('Failed to execute unorderedBulkOp and the error could not handled.', err);
  323. throw new Error('Failed to execute unorderedBulkOp and the error could not handled.', err);
  324. }
  325. }
  326. /**
  327. * execute unorderedBulkOp and ignore errors
  328. *
  329. * @memberOf ImportService
  330. * @param collectionName
  331. * @param document document being imported
  332. * @returns document to be persisted
  333. */
  334. convertDocuments<D extends Document>(collectionName: string, document: D, overwriteParams: OverwriteParams): D {
  335. const Model = getModelFromCollectionName(collectionName);
  336. const schema = (Model != null) ? Model.schema : undefined;
  337. const convertMap = this.convertMap[collectionName];
  338. const _document: D = structuredClone(document);
  339. // apply keepOriginal to all of properties
  340. Object.entries(document).forEach(([propertyName, value]) => {
  341. _document[propertyName] = keepOriginal(value, { document, propertyName });
  342. });
  343. // Mongoose Model
  344. if (convertMap != null) {
  345. // assign value from documents being imported
  346. Object.entries(convertMap).forEach(([propertyName, convertedValue]) => {
  347. const value = document[propertyName];
  348. // distinguish between null and undefined
  349. if (value === undefined) {
  350. return; // next entry
  351. }
  352. const convertFunc = (typeof convertedValue === 'function') ? convertedValue : null;
  353. _document[propertyName] = (convertFunc != null) ? convertFunc(value, { document, propertyName, schema }) : convertedValue;
  354. });
  355. }
  356. // overwrite documents with custom values
  357. Object.entries(overwriteParams).forEach(([propertyName, overwriteValue]) => {
  358. const value = document[propertyName];
  359. // distinguish between null and undefined
  360. if (value !== undefined) {
  361. const overwriteFunc = (typeof overwriteValue === 'function') ? overwriteValue : null;
  362. _document[propertyName] = (overwriteFunc != null) ? overwriteFunc(value, { document: _document, propertyName, schema }) : overwriteValue;
  363. }
  364. });
  365. return _document;
  366. }
  367. /**
  368. * validate using meta.json
  369. * to pass validation, all the criteria must be met
  370. * - ${version of this GROWI} === ${version of GROWI that exported data}
  371. *
  372. * @memberOf ImportService
  373. * @param {object} meta meta data from meta.json
  374. */
  375. validate(meta) {
  376. if (meta.version !== this.crowi.version) {
  377. throw new Error('The version of this GROWI and the uploaded GROWI data are not the same');
  378. }
  379. // TODO: check if all migrations are completed
  380. // - export: throw err if there are pending migrations
  381. // - import: throw err if there are pending migrations
  382. }
  383. /**
  384. * Delete all uploaded files
  385. */
  386. deleteAllZipFiles() {
  387. fs.readdirSync(this.baseDir)
  388. .filter(file => path.extname(file) === '.zip')
  389. .forEach(file => fs.unlinkSync(path.join(this.baseDir, file)));
  390. }
  391. }