// import.js
  1. import loggerFactory from '~/utils/logger';
  2. const logger = loggerFactory('growi:services:ImportService'); // eslint-disable-line no-unused-vars
  3. const fs = require('fs');
  4. const path = require('path');
  5. const isIsoDate = require('is-iso-date');
  6. const parseISO = require('date-fns/parseISO');
  7. const { Writable, Transform } = require('stream');
  8. const JSONStream = require('JSONStream');
  9. const streamToPromise = require('stream-to-promise');
  10. const unzipper = require('unzipper');
  11. const mongoose = require('mongoose');
  12. const { ObjectId } = mongoose.Types;
  13. const { createBatchStream } = require('../util/batch-stream');
  14. const CollectionProgressingStatus = require('../models/vo/collection-progressing-status');
  15. const BULK_IMPORT_SIZE = 100;
  16. class ImportSettings {
  17. constructor(mode) {
  18. this.mode = mode || 'insert';
  19. this.jsonFileName = null;
  20. this.overwriteParams = null;
  21. }
  22. }
  23. class ImportingCollectionError extends Error {
  24. constructor(collectionProgress, error) {
  25. super(error);
  26. this.collectionProgress = collectionProgress;
  27. }
  28. }
  29. class ImportService {
  30. constructor(crowi) {
  31. this.crowi = crowi;
  32. this.growiBridgeService = crowi.growiBridgeService;
  33. this.getFile = this.growiBridgeService.getFile.bind(this);
  34. this.baseDir = path.join(crowi.tmpDir, 'imports');
  35. this.keepOriginal = this.keepOriginal.bind(this);
  36. this.adminEvent = crowi.event('admin');
  37. // { pages: { _id: ..., path: ..., ...}, users: { _id: ..., username: ..., }, ... }
  38. this.convertMap = {};
  39. this.initConvertMap(crowi.models);
  40. this.currentProgressingStatus = null;
  41. }
  42. /**
  43. * generate ImportSettings instance
  44. * @param {string} mode bulk operation mode (insert | upsert | flushAndInsert)
  45. */
  46. generateImportSettings(mode) {
  47. return new ImportSettings(mode);
  48. }
  49. /**
  50. * initialize convert map. set keepOriginal as default
  51. *
  52. * @memberOf ImportService
  53. * @param {object} models from models/index.js
  54. */
  55. initConvertMap(models) {
  56. // by default, original value is used for imported documents
  57. for (const model of Object.values(models)) {
  58. if (model.collection == null) {
  59. continue;
  60. }
  61. const collectionName = model.collection.name;
  62. this.convertMap[collectionName] = {};
  63. for (const key of Object.keys(model.schema.paths)) {
  64. this.convertMap[collectionName][key] = this.keepOriginal;
  65. }
  66. }
  67. }
  68. /**
  69. * keep original value
  70. * automatically convert ObjectId
  71. *
  72. * @memberOf ImportService
  73. * @param {any} value value from imported document
  74. * @param {{ document: object, schema: object, propertyName: string }}
  75. * @return {any} new value for the document
  76. *
  77. * @see https://mongoosejs.com/docs/api/schematype.html#schematype_SchemaType-cast
  78. */
  79. keepOriginal(value, { document, schema, propertyName }) {
  80. // Model
  81. if (schema != null && schema.path(propertyName) != null) {
  82. const schemaType = schema.path(propertyName);
  83. return schemaType.cast(value);
  84. }
  85. // _id
  86. if (propertyName === '_id' && ObjectId.isValid(value)) {
  87. return ObjectId(value);
  88. }
  89. // Date
  90. if (isIsoDate(value)) {
  91. return parseISO(value);
  92. }
  93. return value;
  94. }
  95. /**
  96. * parse all zip files in downloads dir
  97. *
  98. * @memberOf ExportService
  99. * @return {object} info for zip files and whether currentProgressingStatus exists
  100. */
  101. async getStatus() {
  102. const zipFiles = fs.readdirSync(this.baseDir).filter(file => path.extname(file) === '.zip');
  103. // process serially so as not to waste memory
  104. const zipFileStats = [];
  105. const parseZipFilePromises = zipFiles.map((file) => {
  106. const zipFile = this.getFile(file);
  107. return this.growiBridgeService.parseZipFile(zipFile);
  108. });
  109. for await (const stat of parseZipFilePromises) {
  110. zipFileStats.push(stat);
  111. }
  112. // filter null object (broken zip)
  113. const filtered = zipFileStats
  114. .filter(zipFileStat => zipFileStat != null);
  115. // sort with ctime("Change Time" - Time when file status was last changed (inode data modification).)
  116. filtered.sort((a, b) => { return a.fileStat.ctime - b.fileStat.ctime });
  117. const isImporting = this.currentProgressingStatus != null;
  118. const zipFileStat = filtered.pop();
  119. let isTheSameVersion = false;
  120. if (zipFileStat != null) {
  121. try {
  122. this.validate(zipFileStat.meta);
  123. isTheSameVersion = true;
  124. }
  125. catch (err) {
  126. isTheSameVersion = false;
  127. logger.error('the versions are not met', err);
  128. }
  129. }
  130. return {
  131. isTheSameVersion,
  132. zipFileStat,
  133. isImporting,
  134. progressList: isImporting ? this.currentProgressingStatus.progressList : null,
  135. };
  136. }
  137. /**
  138. * import collections from json
  139. *
  140. * @param {string} collections MongoDB collection name
  141. * @param {array} importSettingsMap key: collection name, value: ImportSettings instance
  142. */
  143. async import(collections, importSettingsMap) {
  144. // init status object
  145. this.currentProgressingStatus = new CollectionProgressingStatus(collections);
  146. // process serially so as not to waste memory
  147. const promises = collections.map((collectionName) => {
  148. const importSettings = importSettingsMap[collectionName];
  149. return this.importCollection(collectionName, importSettings);
  150. });
  151. for await (const promise of promises) {
  152. try {
  153. await promise;
  154. }
  155. // catch ImportingCollectionError
  156. catch (err) {
  157. const { collectionProgress } = err;
  158. logger.error(`failed to import to ${collectionProgress.collectionName}`, err);
  159. this.emitProgressEvent(collectionProgress, { message: err.message });
  160. }
  161. }
  162. this.currentProgressingStatus = null;
  163. this.emitTerminateEvent();
  164. }
  165. /**
  166. * import a collection from json
  167. *
  168. * @memberOf ImportService
  169. * @param {string} collectionName MongoDB collection name
  170. * @param {ImportSettings} importSettings
  171. * @return {insertedIds: Array.<string>, failedIds: Array.<string>}
  172. */
  173. async importCollection(collectionName, importSettings) {
  174. // prepare functions invoked from custom streams
  175. const convertDocuments = this.convertDocuments.bind(this);
  176. const bulkOperate = this.bulkOperate.bind(this);
  177. const execUnorderedBulkOpSafely = this.execUnorderedBulkOpSafely.bind(this);
  178. const emitProgressEvent = this.emitProgressEvent.bind(this);
  179. const collection = mongoose.connection.collection(collectionName);
  180. const { mode, jsonFileName, overwriteParams } = importSettings;
  181. const collectionProgress = this.currentProgressingStatus.progressMap[collectionName];
  182. try {
  183. const jsonFile = this.getFile(jsonFileName);
  184. // validate options
  185. this.validateImportSettings(collectionName, importSettings);
  186. // flush
  187. if (mode === 'flushAndInsert') {
  188. await collection.deleteMany({});
  189. }
  190. // stream 1
  191. const readStream = fs.createReadStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
  192. // stream 2
  193. const jsonStream = JSONStream.parse('*');
  194. // stream 3
  195. const convertStream = new Transform({
  196. objectMode: true,
  197. transform(doc, encoding, callback) {
  198. const converted = convertDocuments(collectionName, doc, overwriteParams);
  199. this.push(converted);
  200. callback();
  201. },
  202. });
  203. // stream 4
  204. const batchStream = createBatchStream(BULK_IMPORT_SIZE);
  205. // stream 5
  206. const writeStream = new Writable({
  207. objectMode: true,
  208. async write(batch, encoding, callback) {
  209. const unorderedBulkOp = collection.initializeUnorderedBulkOp();
  210. // documents are not persisted until unorderedBulkOp.execute()
  211. batch.forEach((document) => {
  212. bulkOperate(unorderedBulkOp, collectionName, document, importSettings);
  213. });
  214. // exec
  215. const { insertedCount, modifiedCount, errors } = await execUnorderedBulkOpSafely(unorderedBulkOp);
  216. logger.debug(`Importing ${collectionName}. Inserted: ${insertedCount}. Modified: ${modifiedCount}. Failed: ${errors.length}.`);
  217. const increment = insertedCount + modifiedCount + errors.length;
  218. collectionProgress.currentCount += increment;
  219. collectionProgress.totalCount += increment;
  220. collectionProgress.insertedCount += insertedCount;
  221. collectionProgress.modifiedCount += modifiedCount;
  222. emitProgressEvent(collectionProgress, errors);
  223. callback();
  224. },
  225. final(callback) {
  226. logger.info(`Importing ${collectionName} has completed.`);
  227. callback();
  228. },
  229. });
  230. readStream
  231. .pipe(jsonStream)
  232. .pipe(convertStream)
  233. .pipe(batchStream)
  234. .pipe(writeStream);
  235. await streamToPromise(writeStream);
  236. // clean up tmp directory
  237. fs.unlinkSync(jsonFile);
  238. }
  239. catch (err) {
  240. throw new ImportingCollectionError(collectionProgress, err);
  241. }
  242. }
  243. /**
  244. *
  245. * @param {string} collectionName
  246. * @param {importSettings} importSettings
  247. */
  248. validateImportSettings(collectionName, importSettings) {
  249. const { mode } = importSettings;
  250. switch (collectionName) {
  251. case 'configs':
  252. if (mode !== 'flushAndInsert') {
  253. throw new Error(`The specified mode '${mode}' is not allowed when importing to 'configs' collection.`);
  254. }
  255. break;
  256. }
  257. }
  258. /**
  259. * process bulk operation
  260. * @param {object} bulk MongoDB Bulk instance
  261. * @param {string} collectionName collection name
  262. * @param {object} document
  263. * @param {ImportSettings} importSettings
  264. */
  265. bulkOperate(bulk, collectionName, document, importSettings) {
  266. // insert
  267. if (importSettings.mode !== 'upsert') {
  268. return bulk.insert(document);
  269. }
  270. // upsert
  271. switch (collectionName) {
  272. default:
  273. return bulk.find({ _id: document._id }).upsert().replaceOne(document);
  274. }
  275. }
  276. /**
  277. * emit progress event
  278. * @param {CollectionProgress} collectionProgress
  279. * @param {object} appendedErrors key: collection name, value: array of error object
  280. */
  281. emitProgressEvent(collectionProgress, appendedErrors) {
  282. const { collectionName } = collectionProgress;
  283. // send event (in progress in global)
  284. this.adminEvent.emit('onProgressForImport', { collectionName, collectionProgress, appendedErrors });
  285. }
  286. /**
  287. * emit terminate event
  288. */
  289. emitTerminateEvent() {
  290. this.adminEvent.emit('onTerminateForImport');
  291. }
  292. /**
  293. * extract a zip file
  294. *
  295. * @memberOf ImportService
  296. * @param {string} zipFile absolute path to zip file
  297. * @return {Array.<string>} array of absolute paths to extracted files
  298. */
  299. async unzip(zipFile) {
  300. const readStream = fs.createReadStream(zipFile);
  301. const unzipStream = readStream.pipe(unzipper.Parse());
  302. const files = [];
  303. unzipStream.on('entry', (entry) => {
  304. const fileName = entry.path;
  305. // https://regex101.com/r/mD4eZs/6
  306. // prevent from unexpecting attack doing unzip file (path traversal attack)
  307. // FOR EXAMPLE
  308. // ../../src/server/views/admin/markdown.html
  309. if (fileName.match(/(\.\.\/|\.\.\\)/)) {
  310. logger.error('File path is not appropriate.', fileName);
  311. return;
  312. }
  313. if (fileName === this.growiBridgeService.getMetaFileName()) {
  314. // skip meta.json
  315. entry.autodrain();
  316. }
  317. else {
  318. const jsonFile = path.join(this.baseDir, fileName);
  319. const writeStream = fs.createWriteStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
  320. entry.pipe(writeStream);
  321. files.push(jsonFile);
  322. }
  323. });
  324. await streamToPromise(unzipStream);
  325. return files;
  326. }
  327. /**
  328. * execute unorderedBulkOp and ignore errors
  329. *
  330. * @memberOf ImportService
  331. * @param {object} unorderedBulkOp result of Model.collection.initializeUnorderedBulkOp()
  332. * @return {object} e.g. { insertedCount: 10, errors: [...] }
  333. */
  334. async execUnorderedBulkOpSafely(unorderedBulkOp) {
  335. let errors = [];
  336. let result = null;
  337. try {
  338. const log = await unorderedBulkOp.execute();
  339. result = log.result;
  340. }
  341. catch (err) {
  342. result = err.result;
  343. errors = err.writeErrors || [err];
  344. errors.map((err) => {
  345. const moreDetailErr = err.err;
  346. return { _id: moreDetailErr.op._id, message: err.errmsg };
  347. });
  348. }
  349. const insertedCount = result.nInserted + result.nUpserted;
  350. const modifiedCount = result.nModified;
  351. return {
  352. insertedCount,
  353. modifiedCount,
  354. errors,
  355. };
  356. }
  357. /**
  358. * execute unorderedBulkOp and ignore errors
  359. *
  360. * @memberOf ImportService
  361. * @param {string} collectionName
  362. * @param {object} document document being imported
  363. * @param {object} overwriteParams overwrite each document with unrelated value. e.g. { creator: req.user }
  364. * @return {object} document to be persisted
  365. */
  366. convertDocuments(collectionName, document, overwriteParams) {
  367. const Model = this.growiBridgeService.getModelFromCollectionName(collectionName);
  368. const schema = (Model != null) ? Model.schema : null;
  369. const convertMap = this.convertMap[collectionName];
  370. const _document = {};
  371. // not Mongoose Model
  372. if (convertMap == null) {
  373. // apply keepOriginal to all of properties
  374. Object.entries(document).forEach(([propertyName, value]) => {
  375. _document[propertyName] = this.keepOriginal(value, { document, propertyName });
  376. });
  377. }
  378. // Mongoose Model
  379. else {
  380. // assign value from documents being imported
  381. Object.entries(convertMap).forEach(([propertyName, convertedValue]) => {
  382. const value = document[propertyName];
  383. // distinguish between null and undefined
  384. if (value === undefined) {
  385. return; // next entry
  386. }
  387. const convertFunc = (typeof convertedValue === 'function') ? convertedValue : null;
  388. _document[propertyName] = (convertFunc != null) ? convertFunc(value, { document, propertyName, schema }) : convertedValue;
  389. });
  390. }
  391. // overwrite documents with custom values
  392. Object.entries(overwriteParams).forEach(([propertyName, overwriteValue]) => {
  393. const value = document[propertyName];
  394. // distinguish between null and undefined
  395. if (value !== undefined) {
  396. const overwriteFunc = (typeof overwriteValue === 'function') ? overwriteValue : null;
  397. _document[propertyName] = (overwriteFunc != null) ? overwriteFunc(value, { document: _document, propertyName, schema }) : overwriteValue;
  398. }
  399. });
  400. return _document;
  401. }
  402. /**
  403. * validate using meta.json
  404. * to pass validation, all the criteria must be met
  405. * - ${version of this growi} === ${version of growi that exported data}
  406. *
  407. * @memberOf ImportService
  408. * @param {object} meta meta data from meta.json
  409. */
  410. validate(meta) {
  411. if (meta.version !== this.crowi.version) {
  412. throw new Error('the version of this growi and the growi that exported the data are not met');
  413. }
  414. // TODO: check if all migrations are completed
  415. // - export: throw err if there are pending migrations
  416. // - import: throw err if there are pending migrations
  417. }
  418. /**
  419. * Delete all uploaded files
  420. */
  421. deleteAllZipFiles() {
  422. fs.readdirSync(this.baseDir)
  423. .filter(file => path.extname(file) === '.zip')
  424. .forEach(file => fs.unlinkSync(path.join(this.baseDir, file)));
  425. }
  426. }
  427. module.exports = ImportService;