// import.js — GROWI import service
  1. /**
  2. * @typedef {import("@types/unzip-stream").Parse} Parse
  3. * @typedef {import("@types/unzip-stream").Entry} Entry
  4. */
  5. import { parseISO } from 'date-fns/parseISO';
  6. import gc from 'expose-gc/function';
  7. import loggerFactory from '~/utils/logger';
  8. const fs = require('fs');
  9. const path = require('path');
  10. const { Writable, Transform } = require('stream');
  11. const JSONStream = require('JSONStream');
  12. const isIsoDate = require('is-iso-date');
  13. const mongoose = require('mongoose');
  14. const streamToPromise = require('stream-to-promise');
  15. const unzipStream = require('unzip-stream');
  16. const CollectionProgressingStatus = require('../models/vo/collection-progressing-status');
  17. const { createBatchStream } = require('../util/batch-stream');
  18. const { ObjectId } = mongoose.Types;
  19. const logger = loggerFactory('growi:services:ImportService'); // eslint-disable-line no-unused-vars
  20. const BULK_IMPORT_SIZE = 100;
  21. export class ImportSettings {
  22. constructor(mode) {
  23. this.mode = mode || 'insert';
  24. this.jsonFileName = null;
  25. this.overwriteParams = null;
  26. }
  27. }
  28. class ImportingCollectionError extends Error {
  29. constructor(collectionProgress, error) {
  30. super(error);
  31. this.collectionProgress = collectionProgress;
  32. }
  33. }
  34. class ImportService {
  35. constructor(crowi) {
  36. this.crowi = crowi;
  37. this.growiBridgeService = crowi.growiBridgeService;
  38. this.getFile = this.growiBridgeService.getFile.bind(this);
  39. this.baseDir = path.join(crowi.tmpDir, 'imports');
  40. this.keepOriginal = this.keepOriginal.bind(this);
  41. this.adminEvent = crowi.event('admin');
  42. // { pages: { _id: ..., path: ..., ...}, users: { _id: ..., username: ..., }, ... }
  43. this.convertMap = {};
  44. this.initConvertMap(crowi.models);
  45. this.currentProgressingStatus = null;
  46. }
  47. /**
  48. * generate ImportSettings instance
  49. * @param {string} mode bulk operation mode (insert | upsert | flushAndInsert)
  50. */
  51. generateImportSettings(mode) {
  52. return new ImportSettings(mode);
  53. }
  54. /**
  55. * initialize convert map. set keepOriginal as default
  56. *
  57. * @memberOf ImportService
  58. * @param {object} models from models/index.js
  59. */
  60. initConvertMap(models) {
  61. // by default, original value is used for imported documents
  62. for (const model of Object.values(models)) {
  63. if (model.collection == null) {
  64. continue;
  65. }
  66. const collectionName = model.collection.name;
  67. this.convertMap[collectionName] = {};
  68. for (const key of Object.keys(model.schema.paths)) {
  69. this.convertMap[collectionName][key] = this.keepOriginal;
  70. }
  71. }
  72. }
  73. /**
  74. * keep original value
  75. * automatically convert ObjectId
  76. *
  77. * @memberOf ImportService
  78. * @param {any} value value from imported document
  79. * @param {{ document: object, schema: object, propertyName: string }}
  80. * @return {any} new value for the document
  81. *
  82. * @see https://mongoosejs.com/docs/api/schematype.html#schematype_SchemaType-cast
  83. */
  84. keepOriginal(value, { document, schema, propertyName }) {
  85. // Model
  86. if (schema != null && schema.path(propertyName) != null) {
  87. const schemaType = schema.path(propertyName);
  88. return schemaType.cast(value);
  89. }
  90. // _id
  91. if (propertyName === '_id' && ObjectId.isValid(value)) {
  92. return ObjectId(value);
  93. }
  94. // Date
  95. if (isIsoDate(value)) {
  96. return parseISO(value);
  97. }
  98. return value;
  99. }
  100. /**
  101. * parse all zip files in downloads dir
  102. *
  103. * @memberOf ExportService
  104. * @return {object} info for zip files and whether currentProgressingStatus exists
  105. */
  106. async getStatus() {
  107. const zipFiles = fs.readdirSync(this.baseDir).filter(file => path.extname(file) === '.zip');
  108. // process serially so as not to waste memory
  109. const zipFileStats = [];
  110. const parseZipFilePromises = zipFiles.map((file) => {
  111. const zipFile = this.getFile(file);
  112. return this.growiBridgeService.parseZipFile(zipFile);
  113. });
  114. for await (const stat of parseZipFilePromises) {
  115. zipFileStats.push(stat);
  116. }
  117. // filter null object (broken zip)
  118. const filtered = zipFileStats
  119. .filter(zipFileStat => zipFileStat != null);
  120. // sort with ctime("Change Time" - Time when file status was last changed (inode data modification).)
  121. filtered.sort((a, b) => { return a.fileStat.ctime - b.fileStat.ctime });
  122. const isImporting = this.currentProgressingStatus != null;
  123. const zipFileStat = filtered.pop();
  124. let isTheSameVersion = false;
  125. if (zipFileStat != null) {
  126. try {
  127. this.validate(zipFileStat.meta);
  128. isTheSameVersion = true;
  129. }
  130. catch (err) {
  131. isTheSameVersion = false;
  132. logger.error('the versions are not met', err);
  133. }
  134. }
  135. return {
  136. isTheSameVersion,
  137. zipFileStat,
  138. isImporting,
  139. progressList: isImporting ? this.currentProgressingStatus.progressList : null,
  140. };
  141. }
  142. /**
  143. * import collections from json
  144. *
  145. * @param {string} collections MongoDB collection name
  146. * @param {array} importSettingsMap key: collection name, value: ImportSettings instance
  147. */
  148. async import(collections, importSettingsMap) {
  149. // init status object
  150. this.currentProgressingStatus = new CollectionProgressingStatus(collections);
  151. // process serially so as not to waste memory
  152. const promises = collections.map((collectionName) => {
  153. const importSettings = importSettingsMap[collectionName];
  154. return this.importCollection(collectionName, importSettings);
  155. });
  156. for await (const promise of promises) {
  157. try {
  158. await promise;
  159. }
  160. // catch ImportingCollectionError
  161. catch (err) {
  162. const { collectionProgress } = err;
  163. logger.error(`failed to import to ${collectionProgress.collectionName}`, err);
  164. this.emitProgressEvent(collectionProgress, { message: err.message });
  165. }
  166. }
  167. this.currentProgressingStatus = null;
  168. this.emitTerminateEvent();
  169. await this.crowi.configManager.loadConfigs();
  170. const currentIsV5Compatible = this.crowi.configManager.getConfig('crowi', 'app:isV5Compatible');
  171. const isImportPagesCollection = collections.includes('pages');
  172. const shouldNormalizePages = currentIsV5Compatible && isImportPagesCollection;
  173. if (shouldNormalizePages) await this.crowi.pageService.normalizeAllPublicPages();
  174. }
  175. /**
  176. * import a collection from json
  177. *
  178. * @memberOf ImportService
  179. * @param {string} collectionName MongoDB collection name
  180. * @param {ImportSettings} importSettings
  181. * @return {insertedIds: Array.<string>, failedIds: Array.<string>}
  182. */
  183. async importCollection(collectionName, importSettings) {
  184. // prepare functions invoked from custom streams
  185. const convertDocuments = this.convertDocuments.bind(this);
  186. const bulkOperate = this.bulkOperate.bind(this);
  187. const execUnorderedBulkOpSafely = this.execUnorderedBulkOpSafely.bind(this);
  188. const emitProgressEvent = this.emitProgressEvent.bind(this);
  189. const collection = mongoose.connection.collection(collectionName);
  190. const { mode, jsonFileName, overwriteParams } = importSettings;
  191. const collectionProgress = this.currentProgressingStatus.progressMap[collectionName];
  192. try {
  193. const jsonFile = this.getFile(jsonFileName);
  194. // validate options
  195. this.validateImportSettings(collectionName, importSettings);
  196. // flush
  197. if (mode === 'flushAndInsert') {
  198. await collection.deleteMany({});
  199. }
  200. // stream 1
  201. const readStream = fs.createReadStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
  202. // stream 2
  203. const jsonStream = JSONStream.parse('*');
  204. // stream 3
  205. const convertStream = new Transform({
  206. objectMode: true,
  207. transform(doc, encoding, callback) {
  208. const converted = convertDocuments(collectionName, doc, overwriteParams);
  209. this.push(converted);
  210. callback();
  211. },
  212. });
  213. // stream 4
  214. const batchStream = createBatchStream(BULK_IMPORT_SIZE);
  215. // stream 5
  216. const writeStream = new Writable({
  217. objectMode: true,
  218. async write(batch, encoding, callback) {
  219. const unorderedBulkOp = collection.initializeUnorderedBulkOp();
  220. // documents are not persisted until unorderedBulkOp.execute()
  221. batch.forEach((document) => {
  222. bulkOperate(unorderedBulkOp, collectionName, document, importSettings);
  223. });
  224. // exec
  225. const { insertedCount, modifiedCount, errors } = await execUnorderedBulkOpSafely(unorderedBulkOp);
  226. logger.debug(`Importing ${collectionName}. Inserted: ${insertedCount}. Modified: ${modifiedCount}. Failed: ${errors.length}.`);
  227. const increment = insertedCount + modifiedCount + errors.length;
  228. collectionProgress.currentCount += increment;
  229. collectionProgress.totalCount += increment;
  230. collectionProgress.insertedCount += insertedCount;
  231. collectionProgress.modifiedCount += modifiedCount;
  232. emitProgressEvent(collectionProgress, errors);
  233. try {
  234. // First aid to prevent unexplained memory leaks
  235. logger.info('global.gc() invoked.');
  236. gc();
  237. }
  238. catch (err) {
  239. logger.error('fail garbage collection: ', err);
  240. }
  241. callback();
  242. },
  243. final(callback) {
  244. logger.info(`Importing ${collectionName} has completed.`);
  245. callback();
  246. },
  247. });
  248. readStream
  249. .pipe(jsonStream)
  250. .pipe(convertStream)
  251. .pipe(batchStream)
  252. .pipe(writeStream);
  253. await streamToPromise(writeStream);
  254. // clean up tmp directory
  255. fs.unlinkSync(jsonFile);
  256. }
  257. catch (err) {
  258. throw new ImportingCollectionError(collectionProgress, err);
  259. }
  260. }
  261. /**
  262. *
  263. * @param {string} collectionName
  264. * @param {importSettings} importSettings
  265. */
  266. validateImportSettings(collectionName, importSettings) {
  267. const { mode } = importSettings;
  268. switch (collectionName) {
  269. case 'configs':
  270. if (mode !== 'flushAndInsert') {
  271. throw new Error(`The specified mode '${mode}' is not allowed when importing to 'configs' collection.`);
  272. }
  273. break;
  274. }
  275. }
  276. /**
  277. * process bulk operation
  278. * @param {object} bulk MongoDB Bulk instance
  279. * @param {string} collectionName collection name
  280. * @param {object} document
  281. * @param {ImportSettings} importSettings
  282. */
  283. bulkOperate(bulk, collectionName, document, importSettings) {
  284. // insert
  285. if (importSettings.mode !== 'upsert') {
  286. return bulk.insert(document);
  287. }
  288. // upsert
  289. switch (collectionName) {
  290. case 'pages':
  291. return bulk.find({ path: document.path }).upsert().replaceOne(document);
  292. default:
  293. return bulk.find({ _id: document._id }).upsert().replaceOne(document);
  294. }
  295. }
  296. /**
  297. * emit progress event
  298. * @param {CollectionProgress} collectionProgress
  299. * @param {object} appendedErrors key: collection name, value: array of error object
  300. */
  301. emitProgressEvent(collectionProgress, appendedErrors) {
  302. const { collectionName } = collectionProgress;
  303. // send event (in progress in global)
  304. this.adminEvent.emit('onProgressForImport', { collectionName, collectionProgress, appendedErrors });
  305. }
  306. /**
  307. * emit terminate event
  308. */
  309. emitTerminateEvent() {
  310. this.adminEvent.emit('onTerminateForImport');
  311. }
  312. /**
  313. * extract a zip file
  314. *
  315. * @memberOf ImportService
  316. * @param {string} zipFile absolute path to zip file
  317. * @return {Array.<string>} array of absolute paths to extracted files
  318. */
  319. async unzip(zipFile) {
  320. const readStream = fs.createReadStream(zipFile);
  321. const unzipStreamPipe = readStream.pipe(unzipStream.Parse());
  322. const files = [];
  323. unzipStreamPipe.on('entry', (/** @type {Entry} */ entry) => {
  324. const fileName = entry.path;
  325. // https://regex101.com/r/mD4eZs/6
  326. // prevent from unexpecting attack doing unzip file (path traversal attack)
  327. // FOR EXAMPLE
  328. // ../../src/server/example.html
  329. if (fileName.match(/(\.\.\/|\.\.\\)/)) {
  330. logger.error('File path is not appropriate.', fileName);
  331. return;
  332. }
  333. if (fileName === this.growiBridgeService.getMetaFileName()) {
  334. // skip meta.json
  335. entry.autodrain();
  336. }
  337. else {
  338. const jsonFile = path.join(this.baseDir, fileName);
  339. const writeStream = fs.createWriteStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
  340. entry.pipe(writeStream);
  341. files.push(jsonFile);
  342. }
  343. });
  344. await streamToPromise(unzipStreamPipe);
  345. return files;
  346. }
  347. /**
  348. * execute unorderedBulkOp and ignore errors
  349. *
  350. * @memberOf ImportService
  351. * @param {object} unorderedBulkOp result of Model.collection.initializeUnorderedBulkOp()
  352. * @return {object} e.g. { insertedCount: 10, errors: [...] }
  353. */
  354. async execUnorderedBulkOpSafely(unorderedBulkOp) {
  355. let errors = [];
  356. let result = null;
  357. try {
  358. const log = await unorderedBulkOp.execute();
  359. result = log.result;
  360. }
  361. catch (err) {
  362. result = err.result;
  363. errors = err.writeErrors || [err];
  364. errors.map((err) => {
  365. const moreDetailErr = err.err;
  366. return { _id: moreDetailErr.op._id, message: err.errmsg };
  367. });
  368. }
  369. const insertedCount = result.nInserted + result.nUpserted;
  370. const modifiedCount = result.nModified;
  371. return {
  372. insertedCount,
  373. modifiedCount,
  374. errors,
  375. };
  376. }
  377. /**
  378. * execute unorderedBulkOp and ignore errors
  379. *
  380. * @memberOf ImportService
  381. * @param {string} collectionName
  382. * @param {object} document document being imported
  383. * @param {object} overwriteParams overwrite each document with unrelated value. e.g. { creator: req.user }
  384. * @return {object} document to be persisted
  385. */
  386. convertDocuments(collectionName, document, overwriteParams) {
  387. const Model = this.growiBridgeService.getModelFromCollectionName(collectionName);
  388. const schema = (Model != null) ? Model.schema : null;
  389. const convertMap = this.convertMap[collectionName];
  390. const _document = {};
  391. // not Mongoose Model
  392. if (convertMap == null) {
  393. // apply keepOriginal to all of properties
  394. Object.entries(document).forEach(([propertyName, value]) => {
  395. _document[propertyName] = this.keepOriginal(value, { document, propertyName });
  396. });
  397. }
  398. // Mongoose Model
  399. else {
  400. // assign value from documents being imported
  401. Object.entries(convertMap).forEach(([propertyName, convertedValue]) => {
  402. const value = document[propertyName];
  403. // distinguish between null and undefined
  404. if (value === undefined) {
  405. return; // next entry
  406. }
  407. const convertFunc = (typeof convertedValue === 'function') ? convertedValue : null;
  408. _document[propertyName] = (convertFunc != null) ? convertFunc(value, { document, propertyName, schema }) : convertedValue;
  409. });
  410. }
  411. // overwrite documents with custom values
  412. Object.entries(overwriteParams).forEach(([propertyName, overwriteValue]) => {
  413. const value = document[propertyName];
  414. // distinguish between null and undefined
  415. if (value !== undefined) {
  416. const overwriteFunc = (typeof overwriteValue === 'function') ? overwriteValue : null;
  417. _document[propertyName] = (overwriteFunc != null) ? overwriteFunc(value, { document: _document, propertyName, schema }) : overwriteValue;
  418. }
  419. });
  420. return _document;
  421. }
  422. /**
  423. * validate using meta.json
  424. * to pass validation, all the criteria must be met
  425. * - ${version of this GROWI} === ${version of GROWI that exported data}
  426. *
  427. * @memberOf ImportService
  428. * @param {object} meta meta data from meta.json
  429. */
  430. validate(meta) {
  431. if (meta.version !== this.crowi.version) {
  432. throw new Error('The version of this GROWI and the uploaded GROWI data are not the same');
  433. }
  434. // TODO: check if all migrations are completed
  435. // - export: throw err if there are pending migrations
  436. // - import: throw err if there are pending migrations
  437. }
  438. /**
  439. * Delete all uploaded files
  440. */
  441. deleteAllZipFiles() {
  442. fs.readdirSync(this.baseDir)
  443. .filter(file => path.extname(file) === '.zip')
  444. .forEach(file => fs.unlinkSync(path.join(this.baseDir, file)));
  445. }
  446. }
  447. module.exports = ImportService;