  1. import gc from 'expose-gc/function';
  2. import loggerFactory from '~/utils/logger';
  3. const fs = require('fs');
  4. const path = require('path');
  5. const { Writable, Transform } = require('stream');
  6. const JSONStream = require('JSONStream');
  7. const parseISO = require('date-fns/parseISO');
  8. const isIsoDate = require('is-iso-date');
  9. const mongoose = require('mongoose');
  10. const streamToPromise = require('stream-to-promise');
  11. const unzipper = require('unzipper');
  12. const CollectionProgressingStatus = require('../models/vo/collection-progressing-status');
  13. const { createBatchStream } = require('../util/batch-stream');
  14. const { ObjectId } = mongoose.Types;
  15. const logger = loggerFactory('growi:services:ImportService'); // eslint-disable-line no-unused-vars
  16. const BULK_IMPORT_SIZE = 100;
  17. class ImportSettings {
  18. constructor(mode) {
  19. this.mode = mode || 'insert';
  20. this.jsonFileName = null;
  21. this.overwriteParams = null;
  22. }
  23. }
  24. class ImportingCollectionError extends Error {
  25. constructor(collectionProgress, error) {
  26. super(error);
  27. this.collectionProgress = collectionProgress;
  28. }
  29. }
  30. class ImportService {
  31. constructor(crowi) {
  32. this.crowi = crowi;
  33. this.growiBridgeService = crowi.growiBridgeService;
  34. this.getFile = this.growiBridgeService.getFile.bind(this);
  35. this.baseDir = path.join(crowi.tmpDir, 'imports');
  36. this.keepOriginal = this.keepOriginal.bind(this);
  37. this.adminEvent = crowi.event('admin');
  38. // { pages: { _id: ..., path: ..., ...}, users: { _id: ..., username: ..., }, ... }
  39. this.convertMap = {};
  40. this.initConvertMap(crowi.models);
  41. this.currentProgressingStatus = null;
  42. }
  43. /**
  44. * generate ImportSettings instance
  45. * @param {string} mode bulk operation mode (insert | upsert | flushAndInsert)
  46. */
  47. generateImportSettings(mode) {
  48. return new ImportSettings(mode);
  49. }
  50. /**
  51. * initialize convert map. set keepOriginal as default
  52. *
  53. * @memberOf ImportService
  54. * @param {object} models from models/index.js
  55. */
  56. initConvertMap(models) {
  57. // by default, original value is used for imported documents
  58. for (const model of Object.values(models)) {
  59. if (model.collection == null) {
  60. continue;
  61. }
  62. const collectionName = model.collection.name;
  63. this.convertMap[collectionName] = {};
  64. for (const key of Object.keys(model.schema.paths)) {
  65. this.convertMap[collectionName][key] = this.keepOriginal;
  66. }
  67. }
  68. }
  69. /**
  70. * keep original value
  71. * automatically convert ObjectId
  72. *
  73. * @memberOf ImportService
  74. * @param {any} value value from imported document
  75. * @param {{ document: object, schema: object, propertyName: string }}
  76. * @return {any} new value for the document
  77. *
  78. * @see https://mongoosejs.com/docs/api/schematype.html#schematype_SchemaType-cast
  79. */
  80. keepOriginal(value, { document, schema, propertyName }) {
  81. // Model
  82. if (schema != null && schema.path(propertyName) != null) {
  83. const schemaType = schema.path(propertyName);
  84. return schemaType.cast(value);
  85. }
  86. // _id
  87. if (propertyName === '_id' && ObjectId.isValid(value)) {
  88. return ObjectId(value);
  89. }
  90. // Date
  91. if (isIsoDate(value)) {
  92. return parseISO(value);
  93. }
  94. return value;
  95. }
  96. /**
  97. * parse all zip files in downloads dir
  98. *
  99. * @memberOf ExportService
  100. * @return {object} info for zip files and whether currentProgressingStatus exists
  101. */
  102. async getStatus() {
  103. const zipFiles = fs.readdirSync(this.baseDir).filter(file => path.extname(file) === '.zip');
  104. // process serially so as not to waste memory
  105. const zipFileStats = [];
  106. const parseZipFilePromises = zipFiles.map((file) => {
  107. const zipFile = this.getFile(file);
  108. return this.growiBridgeService.parseZipFile(zipFile);
  109. });
  110. for await (const stat of parseZipFilePromises) {
  111. zipFileStats.push(stat);
  112. }
  113. // filter null object (broken zip)
  114. const filtered = zipFileStats
  115. .filter(zipFileStat => zipFileStat != null);
  116. // sort with ctime("Change Time" - Time when file status was last changed (inode data modification).)
  117. filtered.sort((a, b) => { return a.fileStat.ctime - b.fileStat.ctime });
  118. const isImporting = this.currentProgressingStatus != null;
  119. const zipFileStat = filtered.pop();
  120. let isTheSameVersion = false;
  121. if (zipFileStat != null) {
  122. try {
  123. this.validate(zipFileStat.meta);
  124. isTheSameVersion = true;
  125. }
  126. catch (err) {
  127. isTheSameVersion = false;
  128. logger.error('the versions are not met', err);
  129. }
  130. }
  131. return {
  132. isTheSameVersion,
  133. zipFileStat,
  134. isImporting,
  135. progressList: isImporting ? this.currentProgressingStatus.progressList : null,
  136. };
  137. }
  138. /**
  139. * import collections from json
  140. *
  141. * @param {string} collections MongoDB collection name
  142. * @param {array} importSettingsMap key: collection name, value: ImportSettings instance
  143. */
  144. async import(collections, importSettingsMap) {
  145. // init status object
  146. this.currentProgressingStatus = new CollectionProgressingStatus(collections);
  147. const isV5Compatible = this.crowi.configManager.getConfig('crowi', 'app:isV5Compatible');
  148. const isImportPagesCollection = collections.includes('pages');
  149. const shouldNormalizePages = isV5Compatible && isImportPagesCollection;
  150. // set isV5Compatible to false
  151. if (shouldNormalizePages) await this.crowi.configManager.updateConfigsInTheSameNamespace('crowi', { 'app:isV5Compatible': false });
  152. // process serially so as not to waste memory
  153. const promises = collections.map((collectionName) => {
  154. const importSettings = importSettingsMap[collectionName];
  155. return this.importCollection(collectionName, importSettings);
  156. });
  157. for await (const promise of promises) {
  158. try {
  159. await promise;
  160. }
  161. // catch ImportingCollectionError
  162. catch (err) {
  163. const { collectionProgress } = err;
  164. logger.error(`failed to import to ${collectionProgress.collectionName}`, err);
  165. this.emitProgressEvent(collectionProgress, { message: err.message });
  166. }
  167. }
  168. // run normalizeAllPublicPages
  169. if (shouldNormalizePages) await this.crowi.pageService.normalizeAllPublicPages();
  170. this.currentProgressingStatus = null;
  171. this.emitTerminateEvent();
  172. }
  173. /**
  174. * import a collection from json
  175. *
  176. * @memberOf ImportService
  177. * @param {string} collectionName MongoDB collection name
  178. * @param {ImportSettings} importSettings
  179. * @return {insertedIds: Array.<string>, failedIds: Array.<string>}
  180. */
  181. async importCollection(collectionName, importSettings) {
  182. // prepare functions invoked from custom streams
  183. const convertDocuments = this.convertDocuments.bind(this);
  184. const bulkOperate = this.bulkOperate.bind(this);
  185. const execUnorderedBulkOpSafely = this.execUnorderedBulkOpSafely.bind(this);
  186. const emitProgressEvent = this.emitProgressEvent.bind(this);
  187. const collection = mongoose.connection.collection(collectionName);
  188. const { mode, jsonFileName, overwriteParams } = importSettings;
  189. const collectionProgress = this.currentProgressingStatus.progressMap[collectionName];
  190. try {
  191. const jsonFile = this.getFile(jsonFileName);
  192. // validate options
  193. this.validateImportSettings(collectionName, importSettings);
  194. // flush
  195. if (mode === 'flushAndInsert') {
  196. await collection.deleteMany({});
  197. }
  198. // stream 1
  199. const readStream = fs.createReadStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
  200. // stream 2
  201. const jsonStream = JSONStream.parse('*');
  202. // stream 3
  203. const convertStream = new Transform({
  204. objectMode: true,
  205. transform(doc, encoding, callback) {
  206. const converted = convertDocuments(collectionName, doc, overwriteParams);
  207. this.push(converted);
  208. callback();
  209. },
  210. });
  211. // stream 4
  212. const batchStream = createBatchStream(BULK_IMPORT_SIZE);
  213. // stream 5
  214. const writeStream = new Writable({
  215. objectMode: true,
  216. async write(batch, encoding, callback) {
  217. const unorderedBulkOp = collection.initializeUnorderedBulkOp();
  218. // documents are not persisted until unorderedBulkOp.execute()
  219. batch.forEach((document) => {
  220. bulkOperate(unorderedBulkOp, collectionName, document, importSettings);
  221. });
  222. // exec
  223. const { insertedCount, modifiedCount, errors } = await execUnorderedBulkOpSafely(unorderedBulkOp);
  224. logger.debug(`Importing ${collectionName}. Inserted: ${insertedCount}. Modified: ${modifiedCount}. Failed: ${errors.length}.`);
  225. const increment = insertedCount + modifiedCount + errors.length;
  226. collectionProgress.currentCount += increment;
  227. collectionProgress.totalCount += increment;
  228. collectionProgress.insertedCount += insertedCount;
  229. collectionProgress.modifiedCount += modifiedCount;
  230. emitProgressEvent(collectionProgress, errors);
  231. try {
  232. // First aid to prevent unexplained memory leaks
  233. logger.info('global.gc() invoked.');
  234. gc();
  235. }
  236. catch (err) {
  237. logger.error('fail garbage collection: ', err);
  238. }
  239. callback();
  240. },
  241. final(callback) {
  242. logger.info(`Importing ${collectionName} has completed.`);
  243. callback();
  244. },
  245. });
  246. readStream
  247. .pipe(jsonStream)
  248. .pipe(convertStream)
  249. .pipe(batchStream)
  250. .pipe(writeStream);
  251. await streamToPromise(writeStream);
  252. // clean up tmp directory
  253. fs.unlinkSync(jsonFile);
  254. }
  255. catch (err) {
  256. throw new ImportingCollectionError(collectionProgress, err);
  257. }
  258. }
  259. /**
  260. *
  261. * @param {string} collectionName
  262. * @param {importSettings} importSettings
  263. */
  264. validateImportSettings(collectionName, importSettings) {
  265. const { mode } = importSettings;
  266. switch (collectionName) {
  267. case 'configs':
  268. if (mode !== 'flushAndInsert') {
  269. throw new Error(`The specified mode '${mode}' is not allowed when importing to 'configs' collection.`);
  270. }
  271. break;
  272. }
  273. }
  274. /**
  275. * process bulk operation
  276. * @param {object} bulk MongoDB Bulk instance
  277. * @param {string} collectionName collection name
  278. * @param {object} document
  279. * @param {ImportSettings} importSettings
  280. */
  281. bulkOperate(bulk, collectionName, document, importSettings) {
  282. // insert
  283. if (importSettings.mode !== 'upsert') {
  284. return bulk.insert(document);
  285. }
  286. // upsert
  287. switch (collectionName) {
  288. case 'pages':
  289. return bulk.find({ path: document.path }).upsert().replaceOne(document);
  290. default:
  291. return bulk.find({ _id: document._id }).upsert().replaceOne(document);
  292. }
  293. }
  294. /**
  295. * emit progress event
  296. * @param {CollectionProgress} collectionProgress
  297. * @param {object} appendedErrors key: collection name, value: array of error object
  298. */
  299. emitProgressEvent(collectionProgress, appendedErrors) {
  300. const { collectionName } = collectionProgress;
  301. // send event (in progress in global)
  302. this.adminEvent.emit('onProgressForImport', { collectionName, collectionProgress, appendedErrors });
  303. }
  304. /**
  305. * emit terminate event
  306. */
  307. emitTerminateEvent() {
  308. this.adminEvent.emit('onTerminateForImport');
  309. }
  310. /**
  311. * extract a zip file
  312. *
  313. * @memberOf ImportService
  314. * @param {string} zipFile absolute path to zip file
  315. * @return {Array.<string>} array of absolute paths to extracted files
  316. */
  317. async unzip(zipFile) {
  318. const readStream = fs.createReadStream(zipFile);
  319. const unzipStream = readStream.pipe(unzipper.Parse());
  320. const files = [];
  321. unzipStream.on('entry', (entry) => {
  322. const fileName = entry.path;
  323. // https://regex101.com/r/mD4eZs/6
  324. // prevent from unexpecting attack doing unzip file (path traversal attack)
  325. // FOR EXAMPLE
  326. // ../../src/server/views/admin/markdown.html
  327. if (fileName.match(/(\.\.\/|\.\.\\)/)) {
  328. logger.error('File path is not appropriate.', fileName);
  329. return;
  330. }
  331. if (fileName === this.growiBridgeService.getMetaFileName()) {
  332. // skip meta.json
  333. entry.autodrain();
  334. }
  335. else {
  336. const jsonFile = path.join(this.baseDir, fileName);
  337. const writeStream = fs.createWriteStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
  338. entry.pipe(writeStream);
  339. files.push(jsonFile);
  340. }
  341. });
  342. await streamToPromise(unzipStream);
  343. return files;
  344. }
  345. /**
  346. * execute unorderedBulkOp and ignore errors
  347. *
  348. * @memberOf ImportService
  349. * @param {object} unorderedBulkOp result of Model.collection.initializeUnorderedBulkOp()
  350. * @return {object} e.g. { insertedCount: 10, errors: [...] }
  351. */
  352. async execUnorderedBulkOpSafely(unorderedBulkOp) {
  353. let errors = [];
  354. let result = null;
  355. try {
  356. const log = await unorderedBulkOp.execute();
  357. result = log.result;
  358. }
  359. catch (err) {
  360. result = err.result;
  361. errors = err.writeErrors || [err];
  362. errors.map((err) => {
  363. const moreDetailErr = err.err;
  364. return { _id: moreDetailErr.op._id, message: err.errmsg };
  365. });
  366. }
  367. const insertedCount = result.nInserted + result.nUpserted;
  368. const modifiedCount = result.nModified;
  369. return {
  370. insertedCount,
  371. modifiedCount,
  372. errors,
  373. };
  374. }
  375. /**
  376. * execute unorderedBulkOp and ignore errors
  377. *
  378. * @memberOf ImportService
  379. * @param {string} collectionName
  380. * @param {object} document document being imported
  381. * @param {object} overwriteParams overwrite each document with unrelated value. e.g. { creator: req.user }
  382. * @return {object} document to be persisted
  383. */
  384. convertDocuments(collectionName, document, overwriteParams) {
  385. const Model = this.growiBridgeService.getModelFromCollectionName(collectionName);
  386. const schema = (Model != null) ? Model.schema : null;
  387. const convertMap = this.convertMap[collectionName];
  388. const _document = {};
  389. // not Mongoose Model
  390. if (convertMap == null) {
  391. // apply keepOriginal to all of properties
  392. Object.entries(document).forEach(([propertyName, value]) => {
  393. _document[propertyName] = this.keepOriginal(value, { document, propertyName });
  394. });
  395. }
  396. // Mongoose Model
  397. else {
  398. // assign value from documents being imported
  399. Object.entries(convertMap).forEach(([propertyName, convertedValue]) => {
  400. const value = document[propertyName];
  401. // distinguish between null and undefined
  402. if (value === undefined) {
  403. return; // next entry
  404. }
  405. const convertFunc = (typeof convertedValue === 'function') ? convertedValue : null;
  406. _document[propertyName] = (convertFunc != null) ? convertFunc(value, { document, propertyName, schema }) : convertedValue;
  407. });
  408. }
  409. // overwrite documents with custom values
  410. Object.entries(overwriteParams).forEach(([propertyName, overwriteValue]) => {
  411. const value = document[propertyName];
  412. // distinguish between null and undefined
  413. if (value !== undefined) {
  414. const overwriteFunc = (typeof overwriteValue === 'function') ? overwriteValue : null;
  415. _document[propertyName] = (overwriteFunc != null) ? overwriteFunc(value, { document: _document, propertyName, schema }) : overwriteValue;
  416. }
  417. });
  418. return _document;
  419. }
  420. /**
  421. * validate using meta.json
  422. * to pass validation, all the criteria must be met
  423. * - ${version of this growi} === ${version of growi that exported data}
  424. *
  425. * @memberOf ImportService
  426. * @param {object} meta meta data from meta.json
  427. */
  428. validate(meta) {
  429. if (meta.version !== this.crowi.version) {
  430. throw new Error('the version of this growi and the growi that exported the data are not met');
  431. }
  432. // TODO: check if all migrations are completed
  433. // - export: throw err if there are pending migrations
  434. // - import: throw err if there are pending migrations
  435. }
  436. /**
  437. * Delete all uploaded files
  438. */
  439. deleteAllZipFiles() {
  440. fs.readdirSync(this.baseDir)
  441. .filter(file => path.extname(file) === '.zip')
  442. .forEach(file => fs.unlinkSync(path.join(this.baseDir, file)));
  443. }
  444. }
  445. module.exports = ImportService;