Backup restore support (#1878)
Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Parent: 30f615647e
Commit: 5705281de5
@ -117,6 +117,7 @@ specifiers:
|
||||
'@rush-temp/server': file:./projects/server.tgz
|
||||
'@rush-temp/server-attachment': file:./projects/server-attachment.tgz
|
||||
'@rush-temp/server-attachment-resources': file:./projects/server-attachment-resources.tgz
|
||||
'@rush-temp/server-backup': file:./projects/server-backup.tgz
|
||||
'@rush-temp/server-board': file:./projects/server-board.tgz
|
||||
'@rush-temp/server-board-resources': file:./projects/server-board-resources.tgz
|
||||
'@rush-temp/server-calendar': file:./projects/server-calendar.tgz
|
||||
@ -205,6 +206,7 @@ specifiers:
|
||||
'@types/pdfkit': ~0.12.3
|
||||
'@types/prosemirror-model': ~1.16.0
|
||||
'@types/request': ~2.48.8
|
||||
'@types/tar-stream': ^2.2.2
|
||||
'@types/toposort': ^2.0.3
|
||||
'@types/uuid': ^8.3.1
|
||||
'@types/ws': ^8.2.1
|
||||
@ -247,7 +249,7 @@ specifiers:
|
||||
lexorank: ~1.0.4
|
||||
mime-types: ~2.1.34
|
||||
mini-css-extract-plugin: ^2.2.0
|
||||
minio: ~7.0.26
|
||||
minio: ^7.0.26
|
||||
mongodb: ^4.1.1
|
||||
pdfkit: ~0.13.0
|
||||
postcss: ^8.3.4
|
||||
@ -267,7 +269,7 @@ specifiers:
|
||||
svelte-loader: ^3.1.2
|
||||
svelte-preprocess: ^4.10.5
|
||||
svgo-loader: ^3.0.0
|
||||
tar-stream: ~2.2.0
|
||||
tar-stream: ^2.2.0
|
||||
toposort: ^2.0.2
|
||||
ts-loader: ^9.2.5
|
||||
ts-node: ~10.5.0
|
||||
@ -397,6 +399,7 @@ dependencies:
|
||||
'@rush-temp/server': file:projects/server.tgz
|
||||
'@rush-temp/server-attachment': file:projects/server-attachment.tgz
|
||||
'@rush-temp/server-attachment-resources': file:projects/server-attachment-resources.tgz
|
||||
'@rush-temp/server-backup': file:projects/server-backup.tgz
|
||||
'@rush-temp/server-board': file:projects/server-board.tgz
|
||||
'@rush-temp/server-board-resources': file:projects/server-board-resources.tgz
|
||||
'@rush-temp/server-calendar': file:projects/server-calendar.tgz
|
||||
@ -485,6 +488,7 @@ dependencies:
|
||||
'@types/pdfkit': 0.12.6
|
||||
'@types/prosemirror-model': 1.16.2
|
||||
'@types/request': 2.48.8
|
||||
'@types/tar-stream': 2.2.2
|
||||
'@types/toposort': 2.0.3
|
||||
'@types/uuid': 8.3.4
|
||||
'@types/ws': 8.5.3
|
||||
@ -3458,7 +3462,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
/brorand/1.1.0:
|
||||
resolution: {integrity: sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8=}
|
||||
resolution: {integrity: sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==}
|
||||
dev: false
|
||||
|
||||
/brotli/1.3.2:
|
||||
@ -3577,7 +3581,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
/buffer-xor/1.0.3:
|
||||
resolution: {integrity: sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=}
|
||||
resolution: {integrity: sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ==}
|
||||
dev: false
|
||||
|
||||
/buffer/5.7.1:
|
||||
@ -5666,7 +5670,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
/hmac-drbg/1.0.1:
|
||||
resolution: {integrity: sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=}
|
||||
resolution: {integrity: sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==}
|
||||
dependencies:
|
||||
hash.js: 1.1.7
|
||||
minimalistic-assert: 1.0.1
|
||||
@ -6846,7 +6850,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
/json-stream/1.0.0:
|
||||
resolution: {integrity: sha1-GjhU4o0rvuqzHMfd9oPS3cVlJwg=}
|
||||
resolution: {integrity: sha512-H/ZGY0nIAg3QcOwE1QN/rK/Fa7gJn7Ii5obwp6zyPO4xiPNwpIMjqy2gwjBEGqzkF/vSWEIBQCBuN19hYiL6Qg==}
|
||||
dev: false
|
||||
|
||||
/json-stringify-safe/5.0.1:
|
||||
@ -7290,7 +7294,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
/minimalistic-crypto-utils/1.0.1:
|
||||
resolution: {integrity: sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=}
|
||||
resolution: {integrity: sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==}
|
||||
dev: false
|
||||
|
||||
/minimatch/3.1.2:
|
||||
@ -10286,7 +10290,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/account.tgz:
|
||||
resolution: {integrity: sha512-VacBIp2WmX+Az0pXnntBkM6SBH1lkBAInFi28nrMzgsuTIzwO8vPPlmghpdAd1eikYZu4ayjRKzaLg96W6WlEw==, tarball: file:projects/account.tgz}
|
||||
resolution: {integrity: sha512-5mXOtQrXSkbuM8AheKTq1tXMGxiRvfG/3xHlYQJkgQDYv32kTyt2VStCoSk6hsD47NRN6z97f6t3SiqKkwR7LA==, tarball: file:projects/account.tgz}
|
||||
name: '@rush-temp/account'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
@ -10489,7 +10493,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/board-resources.tgz_3b42a51b6c974062237d417c554d9dd7:
|
||||
resolution: {integrity: sha512-JTTZMYUKVbavhuCPcuyMaTLlR4FkyMTr1tjEBvbSPvwxhK/7jCiDFeteZjiTaIk5TRTHbY5/6au0ZVeNJT/ZCA==, tarball: file:projects/board-resources.tgz}
|
||||
resolution: {integrity: sha512-2F+TLiTWGKuAzq/Ormj5tG3WRx18o4tfqC1CmIQ41lieT+JPGOnjXCfXDM7VXrvcz/YFOmXQUwYbEcPj3+y5CA==, tarball: file:projects/board-resources.tgz}
|
||||
id: file:projects/board-resources.tgz
|
||||
name: '@rush-temp/board-resources'
|
||||
version: 0.0.0
|
||||
@ -10524,7 +10528,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/board.tgz:
|
||||
resolution: {integrity: sha512-Lu5CQoDP90pXdtHP1TrELTX530sM7On4qQlxztDjBVc1aBpyzQyBByqYAR8D9QtkboUxlu3PF5BtjgjMaja0LA==, tarball: file:projects/board.tgz}
|
||||
resolution: {integrity: sha512-j5WtObIUOHOyg12kLi30TrYYLQUE0LzoDfoZVMdoemcr6dy4xtupLQnrUZgVA3quU5xWRzumnt22BdvazD4pbQ==, tarball: file:projects/board.tgz}
|
||||
name: '@rush-temp/board'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
@ -11044,7 +11048,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/front.tgz:
|
||||
resolution: {integrity: sha512-A7D9zFiyLbSTYHnho8VEOi2X8s07XxvhAEwoZ+f4VxHu4Pln8u02qUfsucccvwXfI0IOEHgxtwhlspFxkJseVQ==, tarball: file:projects/front.tgz}
|
||||
resolution: {integrity: sha512-flfnxXk2Sd+RLOod6PXRrq45pwqhHRT36VsErirgsj/pqio4tMWu9UNN8dXhqAqYxzVPaePZ71TiYGlVf5yFUQ==, tarball: file:projects/front.tgz}
|
||||
name: '@rush-temp/front'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
@ -11084,7 +11088,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/generator.tgz:
|
||||
resolution: {integrity: sha512-ZQ1Yd0+DPcyNKlo5sOIJIKJ1QAoufS5Lmm7NGBnHbLEowmX6uYG40bjKqCFrlXU4+5wshEdGF8QM4G78oTtqBg==, tarball: file:projects/generator.tgz}
|
||||
resolution: {integrity: sha512-ZiBdEg5io3k7CtNtteXvAajyKhM50zVcLeR8aB9tCWx8gxDiMLRoRN3s7QxzPhBfU90fZ257AejXOovBT5LlEw==, tarball: file:projects/generator.tgz}
|
||||
name: '@rush-temp/generator'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
@ -11586,7 +11590,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/model-attachment.tgz_typescript@4.6.4:
|
||||
resolution: {integrity: sha512-nIehAbCecfn9iBlxTOTjuJTrJBMwjc/qJ1BwR3FxsO+/LAaFQlQMio/KRoe5hBEsM9GniEzj0mFhIMVUhmlqYg==, tarball: file:projects/model-attachment.tgz}
|
||||
resolution: {integrity: sha512-Njt9OQ6cKyN2O9vlzMpULPpPPW2SN+fmwhYjsj0JgqJjk5sk8PLybWtQkDvrG0r0Mpmgjfgb+EYR0Cbc+zlAvQ==, tarball: file:projects/model-attachment.tgz}
|
||||
id: file:projects/model-attachment.tgz
|
||||
name: '@rush-temp/model-attachment'
|
||||
version: 0.0.0
|
||||
@ -11607,7 +11611,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/model-board.tgz_typescript@4.6.4:
|
||||
resolution: {integrity: sha512-rI+fB33ZgVhDuupletdugfFSQPYCPWC5HGxRTvTcUe4sMlU017qUqmuXd9oyn1kxFcrJApQsHwJgrtCT62Ac1g==, tarball: file:projects/model-board.tgz}
|
||||
resolution: {integrity: sha512-djeqbluUfFNDCPaDdcu93/oZI1uEyOhAAVzI/V6susO7qjGJ9jniJLL6+OG9F2vjpWgZB65cSlDOm8Y7tSqRcw==, tarball: file:projects/model-board.tgz}
|
||||
id: file:projects/model-board.tgz
|
||||
name: '@rush-temp/model-board'
|
||||
version: 0.0.0
|
||||
@ -11670,7 +11674,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/model-contact.tgz_typescript@4.6.4:
|
||||
resolution: {integrity: sha512-ku4vmBGFHObQXvglFPzRDhc0K6S/5JWfFsePaJXKqeIiiUVL+F61UxivIFtz8k5Z30Q5YSt12PRSm/gg2UplZg==, tarball: file:projects/model-contact.tgz}
|
||||
resolution: {integrity: sha512-sEwikCTtOWDLzxd13Pb/rS8dfIBLpgRIeXp2fuYOZsZyYypPapLJUZ0iEIUQE5Nhtjwi5s1Zj5HDCyuaXdQpZg==, tarball: file:projects/model-contact.tgz}
|
||||
id: file:projects/model-contact.tgz
|
||||
name: '@rush-temp/model-contact'
|
||||
version: 0.0.0
|
||||
@ -11754,7 +11758,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/model-inventory.tgz_typescript@4.6.4:
|
||||
resolution: {integrity: sha512-6ANN/mcAqrqyBq0PfJ31TAe6EGfpqU2ClL3HjICzC5R2xjY0yDddsbnbq6ZTVzDQimqilPMljlsf0d9IiJIB4Q==, tarball: file:projects/model-inventory.tgz}
|
||||
resolution: {integrity: sha512-6n5r3yUqHiV0tkEr9O5wf6GaN1X+9SzkF9TCdHyzHDZOcMgVI/bh1GQFUus9gKwo5EnCmOI5UpbjGYfEhS34dQ==, tarball: file:projects/model-inventory.tgz}
|
||||
id: file:projects/model-inventory.tgz
|
||||
name: '@rush-temp/model-inventory'
|
||||
version: 0.0.0
|
||||
@ -11775,7 +11779,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/model-lead.tgz_typescript@4.6.4:
|
||||
resolution: {integrity: sha512-RrKYOgTYkhkJiqvWwK52JhZZPYePU6o2erBvzK9BzTWkS6WLw3mJsCueVC9dmp8QwE22tOxs39epuvlLpNusow==, tarball: file:projects/model-lead.tgz}
|
||||
resolution: {integrity: sha512-pkPqg9XKm+ct++u8uNcA7NGrDTyVSoghrQyarvOgjrLeHaTqO5Gr1quTybDujOzG3kVBIlvAas4Rqy8L9dStOw==, tarball: file:projects/model-lead.tgz}
|
||||
id: file:projects/model-lead.tgz
|
||||
name: '@rush-temp/model-lead'
|
||||
version: 0.0.0
|
||||
@ -11859,7 +11863,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/model-recruit.tgz_typescript@4.6.4:
|
||||
resolution: {integrity: sha512-vazDicipeMOzy5b4puug4qf5991NsQ/WH6rfLpvMwH26ekOlBcXpKWCLpK+5VaMMteumHFe+r2QIzTJUJyLW5w==, tarball: file:projects/model-recruit.tgz}
|
||||
resolution: {integrity: sha512-QQ7MN5ZmonZUM9u2wBDXLbovcyQ3mBTjDUePrYlTwaLmqMFK8z4uEhHyL/27myf7cXf2CJVg9GzZC84sT45ozQ==, tarball: file:projects/model-recruit.tgz}
|
||||
id: file:projects/model-recruit.tgz
|
||||
name: '@rush-temp/model-recruit'
|
||||
version: 0.0.0
|
||||
@ -12257,7 +12261,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/model-task.tgz_typescript@4.6.4:
|
||||
resolution: {integrity: sha512-1XOIKbszATMpm418Q5yDdRNediZGbafY0yI44/sQ++hl6zmvlnza07r6ZUV82F3O6WV/+itArMXN/eJoZtzYSQ==, tarball: file:projects/model-task.tgz}
|
||||
resolution: {integrity: sha512-f6lyA/YWIzyqKHVuRYtc8RFj1J3zdhSrSzIMwvKWFaM1oIoxNZFlZ5YrUjpRX+ThH6ZdGsxevoh2fPvV8CB18A==, tarball: file:projects/model-task.tgz}
|
||||
id: file:projects/model-task.tgz
|
||||
name: '@rush-temp/model-task'
|
||||
version: 0.0.0
|
||||
@ -12975,6 +12979,31 @@ packages:
|
||||
- supports-color
|
||||
dev: false
|
||||
|
||||
file:projects/server-backup.tgz:
|
||||
resolution: {integrity: sha512-7vvS8Xs3T4+qBGbyqRU3IoqbuK/JoRvEopSG2aHVhjZ15KqxZjYo8tYwWE5asbw/J6bgqfjPo+Znc2WEudteUA==, tarball: file:projects/server-backup.tgz}
|
||||
name: '@rush-temp/server-backup'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
'@rushstack/heft': 0.44.13
|
||||
'@types/heft-jest': 1.0.2
|
||||
'@types/minio': 7.0.13
|
||||
'@types/node': 16.11.33
|
||||
'@types/tar-stream': 2.2.2
|
||||
'@typescript-eslint/eslint-plugin': 5.22.0_27efc1da00e78084f5aa1809ff6483a1
|
||||
'@typescript-eslint/parser': 5.22.0_eslint@7.32.0+typescript@4.6.4
|
||||
eslint: 7.32.0
|
||||
eslint-config-standard-with-typescript: 21.0.1_d91f81f4c73639e41ff8a6e8953d9ef2
|
||||
eslint-plugin-import: 2.26.0_eslint@7.32.0
|
||||
eslint-plugin-node: 11.1.0_eslint@7.32.0
|
||||
eslint-plugin-promise: 5.2.0_eslint@7.32.0
|
||||
minio: 7.0.28
|
||||
prettier: 2.6.2
|
||||
tar-stream: 2.2.0
|
||||
typescript: 4.6.4
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
dev: false
|
||||
|
||||
file:projects/server-board-resources.tgz:
|
||||
resolution: {integrity: sha512-yuHcbbPwhimuIQJdckcAsdZpTNnx4UP8VXfxQURnWia6B/oTTS4ir5UoTEv+kQwONZ6LjHbkl6MUaF7pww+7nA==, tarball: file:projects/server-board-resources.tgz}
|
||||
name: '@rush-temp/server-board-resources'
|
||||
@ -13140,7 +13169,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/server-core.tgz:
|
||||
resolution: {integrity: sha512-+DXHxThbb36Ht7dFBEqwP/vNHu8MqyPAWTYr2jxN3IbdCD2eaP+9Qjodx7CcoPmLlLH4eviC+m5SDtf9WKttnQ==, tarball: file:projects/server-core.tgz}
|
||||
resolution: {integrity: sha512-+3dVw984M4MFZY0AH9KnkEKW8Hcmc/WPc4TJVNo1klRtL8qYJVCASUPeyK50kRxW/9gBIDG+D3F14+NTw8v4Ww==, tarball: file:projects/server-core.tgz}
|
||||
name: '@rush-temp/server-core'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
@ -13553,7 +13582,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/server-token.tgz:
|
||||
resolution: {integrity: sha512-vQU2vXCGuN7fw3QwC2ojg/afBgrgU9Q4rzcGJD+h5FLIlC8p2Zy3c3azvGEmgyqEe5DWwOxPWfphRde5ideKNQ==, tarball: file:projects/server-token.tgz}
|
||||
resolution: {integrity: sha512-HXUfczQRmjdVFQV0TzYh7IdwLmy7jkj8OpOMYNQsO6KUVcfFcnNl5Mww2NPfSISxUXnwwH2qhsdDAZp4bSjz0g==, tarball: file:projects/server-token.tgz}
|
||||
name: '@rush-temp/server-token'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
@ -13577,7 +13606,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/server-tool.tgz:
|
||||
resolution: {integrity: sha512-0ZEIuNGugYRx0QrOCe/nxsHDNU9aBBGQswOBY5KGF8s8ML26En4KzApMSZa+UwBMp0/o6PH5B9spgKcebTx1fA==, tarball: file:projects/server-tool.tgz}
|
||||
resolution: {integrity: sha512-aLmvs4omQ98WHoRKxIug9Vlv/hMcEB5G6CAachjpWNW+REBnlXvbm86HE4P3PiQGyewxiJAHGfBU1BR9B5xzuQ==, tarball: file:projects/server-tool.tgz}
|
||||
name: '@rush-temp/server-tool'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
@ -13629,7 +13658,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/server.tgz:
|
||||
resolution: {integrity: sha512-lRayBkDeSMgiAGQoywA11vtZSzxQaIBP3l9FNpgvV8yA8h/tJdU7vURyhGnoOswggj6oicWgWH3apVK8BeM/SA==, tarball: file:projects/server.tgz}
|
||||
resolution: {integrity: sha512-49oBSEWK0yxriDWYYOGqKcbTZTyiWXq4qzXZGqIh+XJsPF9yXeV2Z1oNOaBf2MZMJdu+XYcETuS63XGpAfdaDw==, tarball: file:projects/server.tgz}
|
||||
name: '@rush-temp/server'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
@ -13681,7 +13710,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/setting-resources.tgz_3b42a51b6c974062237d417c554d9dd7:
|
||||
resolution: {integrity: sha512-iYbJ0URjqrwjxOT+Lc9kFSBoq0CtlYvF/+ifv5ibTzfVqNJZ4lvqYlzjcDkgjnPX8//QMlq/Y15LrVpeQb03jg==, tarball: file:projects/setting-resources.tgz}
|
||||
resolution: {integrity: sha512-9dxWMCJHajLbFjKG/EZU5BnrzpS9/hasBOQT/VgYXYN7C/lbKOBgO6L9HXNojuH1nxtDdQlI/ivdgZ+lNKx/1Q==, tarball: file:projects/setting-resources.tgz}
|
||||
id: file:projects/setting-resources.tgz
|
||||
name: '@rush-temp/setting-resources'
|
||||
version: 0.0.0
|
||||
@ -13757,7 +13786,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/tags-resources.tgz_3b42a51b6c974062237d417c554d9dd7:
|
||||
resolution: {integrity: sha512-zcvJZS/Xb8SBYoEDWoDc0QD/O3cCu5a1DciPIPX0fhcq3qDWOBETmQ8LI4b1cSthT2Af++RGYK/n6IwkjnqPnA==, tarball: file:projects/tags-resources.tgz}
|
||||
resolution: {integrity: sha512-t1nqlFMZUlbH4tUAjav/sgCr1Bftqu1dRsXIPJ5eMiyqmmt2AlnUkDLU5Nc/4NQBecrjpaSdhmE35TDqfAj01g==, tarball: file:projects/tags-resources.tgz}
|
||||
id: file:projects/tags-resources.tgz
|
||||
name: '@rush-temp/tags-resources'
|
||||
version: 0.0.0
|
||||
@ -14149,7 +14178,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/tool.tgz:
|
||||
resolution: {integrity: sha512-FDtGsWkU/bXJkI2a4pv99KMKgUbBZIaH6OEcDApP6N18CAwPVT6neRFMTNb+3h98qwYOZLLU0amG3+ZFCkvrcw==, tarball: file:projects/tool.tgz}
|
||||
resolution: {integrity: sha512-vFEOkiSzdxppqolIv40FVwtuG4BYtqBCDV3Ib7DqVAnhnIBry+iQt6FYzDdJCDSKuVGqAzBCZjGWR+rZwxsYUQ==, tarball: file:projects/tool.tgz}
|
||||
name: '@rush-temp/tool'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
@ -14250,7 +14279,7 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/tracker.tgz:
|
||||
resolution: {integrity: sha512-jJXCNF7rO8BJWituKKnGIbTjax6K2Rmh7vUf5w8tUv6Pz6X8Zeo/x1fOfSSWEeU6E+32BTJHe0/D/9bcE0dZLQ==, tarball: file:projects/tracker.tgz}
|
||||
resolution: {integrity: sha512-DkdOcMpMu2O34xIr/YlSftYF6YWKw0Uk4hWOgtt3jn2+1pGUROQs0Ht/ivmsxr5sxNns59xHJrbiGKLQB7g39w==, tarball: file:projects/tracker.tgz}
|
||||
name: '@rush-temp/tracker'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
|
@ -3,7 +3,11 @@
|
||||
// See the @microsoft/rush package's LICENSE file for license information.
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
|
@ -3,7 +3,11 @@
|
||||
// See the @microsoft/rush package's LICENSE file for license information.
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
|
@ -74,6 +74,10 @@ class ServerStorageWrapper implements ClientConnection {
|
||||
async loadDocs (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
|
||||
return []
|
||||
}
|
||||
|
||||
async upload (domain: Domain, docs: Doc[]): Promise<void> {}
|
||||
|
||||
async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {}
|
||||
}
|
||||
|
||||
class NullFullTextAdapter implements FullTextAdapter {
|
||||
|
@ -53,7 +53,7 @@
|
||||
"@anticrm/chunter": "~0.6.1",
|
||||
"pdfkit": "~0.13.0",
|
||||
"@anticrm/attachment": "~0.6.1",
|
||||
"minio": "~7.0.26",
|
||||
"minio": "^7.0.26",
|
||||
"@types/pdfkit": "~0.12.3",
|
||||
"@anticrm/task": "~0.6.0",
|
||||
"jpeg-js": "~0.4.3",
|
||||
|
@ -70,6 +70,10 @@ class InMemoryTxAdapter implements TxAdapter {
|
||||
async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
|
||||
return []
|
||||
}
|
||||
|
||||
async upload (domain: Domain, docs: Doc[]): Promise<void> {}
|
||||
|
||||
async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -39,8 +39,7 @@
|
||||
"@types/ws": "^8.2.1",
|
||||
"@types/xml2js": "~0.4.9",
|
||||
"@types/mime-types": "~2.1.1",
|
||||
"@types/request": "~2.48.8",
|
||||
"@types/tar-stream": "~2.2.2"
|
||||
"@types/request": "~2.48.8"
|
||||
},
|
||||
"dependencies": {
|
||||
"mongodb": "^4.1.1",
|
||||
@ -48,7 +47,7 @@
|
||||
"@anticrm/account": "~0.6.0",
|
||||
"@anticrm/core": "~0.6.16",
|
||||
"@anticrm/contact": "~0.6.5",
|
||||
"minio": "~7.0.26",
|
||||
"minio": "^7.0.26",
|
||||
"@anticrm/model-all": "~0.6.0",
|
||||
"@anticrm/model-telegram": "~0.6.0",
|
||||
"@anticrm/telegram": "~0.6.2",
|
||||
@ -105,6 +104,6 @@
|
||||
"@anticrm/rekoni": "~0.6.0",
|
||||
"request": "~2.88.2",
|
||||
"@anticrm/tags": "~0.6.2",
|
||||
"tar-stream": "~2.2.0"
|
||||
"@anticrm/server-backup": "~0.6.0"
|
||||
}
|
||||
}
|
||||
|
@ -1,244 +0,0 @@
|
||||
//
|
||||
// Copyright © 2020, 2021 Anticrm Platform Contributors.
|
||||
// Copyright © 2021 Hardcore Engineering Inc.
|
||||
//
|
||||
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License. You may
|
||||
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
//
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
import core, {
|
||||
BackupClient,
|
||||
BlobData,
|
||||
Client as CoreClient,
|
||||
Doc,
|
||||
Domain,
|
||||
DOMAIN_MODEL,
|
||||
DOMAIN_TRANSIENT,
|
||||
Ref
|
||||
} from '@anticrm/core'
|
||||
import { createWriteStream, existsSync } from 'fs'
|
||||
import { mkdir, readFile, writeFile } from 'fs/promises'
|
||||
import { createGzip } from 'node:zlib'
|
||||
import { join } from 'path'
|
||||
import { Pack, pack } from 'tar-stream'
|
||||
import { gunzipSync, gzipSync } from 'zlib'
|
||||
import { connect } from './connect'
|
||||
|
||||
const dataBlobSize = 100 * 1024 * 1024
|
||||
|
||||
export interface Snapshot {
|
||||
added: Record<Ref<Doc>, string>
|
||||
updated: Record<Ref<Doc>, string>
|
||||
removed: Ref<Doc>[]
|
||||
}
|
||||
export interface DomainData {
|
||||
snapshot?: string
|
||||
storage?: string[]
|
||||
|
||||
// Some statistics
|
||||
added: number
|
||||
updated: number
|
||||
removed: number
|
||||
}
|
||||
export interface BackupSnapshot {
|
||||
// _id => hash of added items.
|
||||
domains: Record<Domain, DomainData>
|
||||
date: number
|
||||
}
|
||||
export interface BackupInfo {
|
||||
version: string
|
||||
snapshots: BackupSnapshot[]
|
||||
}
|
||||
|
||||
async function loadDigest (
|
||||
fileName: string,
|
||||
snapshots: BackupSnapshot[],
|
||||
domain: Domain
|
||||
): Promise<Map<Ref<Doc>, string>> {
|
||||
const result = new Map<Ref<Doc>, string>()
|
||||
for (const s of snapshots) {
|
||||
const d = s.domains[domain]
|
||||
if (d?.snapshot !== undefined) {
|
||||
const dChanges: Snapshot = JSON.parse(gunzipSync(await readFile(join(fileName, d.snapshot))).toString())
|
||||
for (const [k, v] of Object.entries(dChanges.added)) {
|
||||
result.set(k as Ref<Doc>, v)
|
||||
}
|
||||
for (const [k, v] of Object.entries(dChanges.updated)) {
|
||||
result.set(k as Ref<Doc>, v)
|
||||
}
|
||||
for (const d of dChanges.removed) {
|
||||
result.delete(d)
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
export async function backupWorkspace (transactorUrl: string, dbName: string, fileName: string): Promise<void> {
|
||||
const connection = (await connect(transactorUrl, dbName, {
|
||||
mode: 'backup'
|
||||
})) as unknown as CoreClient & BackupClient
|
||||
try {
|
||||
const domains = connection
|
||||
.getHierarchy()
|
||||
.domains()
|
||||
.filter((it) => it !== DOMAIN_TRANSIENT && it !== DOMAIN_MODEL)
|
||||
|
||||
if (!existsSync(fileName)) {
|
||||
await mkdir(fileName, { recursive: true })
|
||||
}
|
||||
|
||||
let backupInfo: BackupInfo = {
|
||||
version: '0.6',
|
||||
snapshots: []
|
||||
}
|
||||
const infoFile = join(fileName, 'backup.json.gz')
|
||||
|
||||
if (existsSync(infoFile)) {
|
||||
backupInfo = JSON.parse(gunzipSync(await readFile(infoFile)).toString())
|
||||
}
|
||||
|
||||
const snapshot: BackupSnapshot = {
|
||||
date: Date.now(),
|
||||
domains: {}
|
||||
}
|
||||
|
||||
backupInfo.snapshots.push(snapshot)
|
||||
let backupIndex = `${backupInfo.snapshots.length}`
|
||||
while (backupIndex.length < 6) {
|
||||
backupIndex = '0' + backupIndex
|
||||
}
|
||||
const bdir = join(fileName, backupIndex)
|
||||
if (!existsSync(bdir)) {
|
||||
await mkdir(bdir, { recursive: true })
|
||||
}
|
||||
|
||||
for (const c of domains) {
|
||||
console.log('dumping domain...', c)
|
||||
|
||||
const changes: Snapshot = {
|
||||
added: {},
|
||||
updated: {},
|
||||
removed: []
|
||||
}
|
||||
let changed = 0
|
||||
let stIndex = 0
|
||||
const domainInfo: Required<DomainData> = {
|
||||
snapshot: join(backupIndex, `${c}-${snapshot.date}.json.gz`),
|
||||
storage: [],
|
||||
added: 0,
|
||||
updated: 0,
|
||||
removed: 0
|
||||
}
|
||||
|
||||
// Cumulative digest
|
||||
const digest = await loadDigest(fileName, backupInfo.snapshots, c)
|
||||
|
||||
let idx: number | undefined
|
||||
|
||||
let _pack: Pack | undefined
|
||||
let addedDocuments = 0
|
||||
|
||||
// update digest tar
|
||||
while (true) {
|
||||
const it = await connection.loadChunk(c, idx)
|
||||
idx = it.idx
|
||||
|
||||
const needRetrieve: Ref<Doc>[] = []
|
||||
|
||||
for (const [k, v] of Object.entries(it.docs)) {
|
||||
const kHash = digest.get(k as Ref<Doc>)
|
||||
if (kHash !== undefined) {
|
||||
digest.delete(k as Ref<Doc>)
|
||||
if (kHash !== v) {
|
||||
changes.updated[k as Ref<Doc>] = v
|
||||
needRetrieve.push(k as Ref<Doc>)
|
||||
changed++
|
||||
}
|
||||
} else {
|
||||
changes.added[k as Ref<Doc>] = v
|
||||
needRetrieve.push(k as Ref<Doc>)
|
||||
changed++
|
||||
}
|
||||
}
|
||||
if (needRetrieve.length > 0) {
|
||||
const docs = await connection.loadDocs(c, needRetrieve)
|
||||
|
||||
// Chunk data into small pieces
|
||||
if (addedDocuments > dataBlobSize && _pack !== undefined) {
|
||||
_pack.finalize()
|
||||
_pack = undefined
|
||||
addedDocuments = 0
|
||||
}
|
||||
if (_pack === undefined) {
|
||||
_pack = pack()
|
||||
stIndex++
|
||||
const storageFile = join(backupIndex, `${c}-data-${snapshot.date}-${stIndex}.tar.gz`)
|
||||
console.log('storing from domain', c, storageFile)
|
||||
domainInfo.storage.push(storageFile)
|
||||
const dataStream = createWriteStream(join(fileName, storageFile))
|
||||
const storageZip = createGzip()
|
||||
|
||||
_pack.pipe(storageZip)
|
||||
storageZip.pipe(dataStream)
|
||||
}
|
||||
|
||||
for (const d of docs) {
|
||||
if (d._class === core.class.BlobData) {
|
||||
const blob = d as BlobData
|
||||
const data = Buffer.from(blob.base64Data, 'base64')
|
||||
blob.base64Data = ''
|
||||
const descrJson = JSON.stringify(d)
|
||||
addedDocuments += descrJson.length
|
||||
addedDocuments += data.length
|
||||
_pack.entry({ name: d._id + '.json' }, descrJson, function (err) {
|
||||
if (err != null) throw err
|
||||
})
|
||||
_pack.entry({ name: d._id }, data, function (err) {
|
||||
if (err != null) throw err
|
||||
})
|
||||
} else {
|
||||
const data = JSON.stringify(d)
|
||||
addedDocuments += data.length
|
||||
_pack.entry({ name: d._id + '.json' }, data, function (err) {
|
||||
if (err != null) throw err
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (it.finished) {
|
||||
break
|
||||
}
|
||||
}
|
||||
changes.removed = Array.from(digest.keys())
|
||||
if (changes.removed.length > 0) {
|
||||
changed++
|
||||
}
|
||||
|
||||
if (changed > 0) {
|
||||
snapshot.domains[c] = domainInfo
|
||||
domainInfo.added = Object.keys(changes.added).length
|
||||
domainInfo.updated = Object.keys(changes.updated).length
|
||||
domainInfo.removed = changes.removed.length
|
||||
await writeFile(join(fileName, domainInfo.snapshot), gzipSync(JSON.stringify(changes)))
|
||||
_pack?.finalize()
|
||||
}
|
||||
}
|
||||
|
||||
await writeFile(infoFile, gzipSync(JSON.stringify(backupInfo, undefined, 2)))
|
||||
} finally {
|
||||
await connection.close()
|
||||
}
|
||||
}
|
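The per-domain digest built above folds every earlier snapshot into a single id → hash map before diffing against the server, and the same logic moves into @anticrm/server-backup below. A small worked sketch of that fold, with simplified types and invented data:

type Digest = Map<string, string>

interface SnapshotLike {
  added: Record<string, string>
  updated: Record<string, string>
  removed: string[]
}

// Later snapshots win: added/updated overwrite the hash, removed drops the id.
function mergeSnapshots (snapshots: SnapshotLike[]): Digest {
  const digest: Digest = new Map()
  for (const s of snapshots) {
    for (const [id, hash] of Object.entries(s.added)) digest.set(id, hash)
    for (const [id, hash] of Object.entries(s.updated)) digest.set(id, hash)
    for (const id of s.removed) digest.delete(id)
  }
  return digest
}

// doc1 is created then updated, doc2 is created then removed:
const digest = mergeSnapshots([
  { added: { doc1: 'h1', doc2: 'h2' }, updated: {}, removed: [] },
  { added: {}, updated: { doc1: 'h1b' }, removed: ['doc2'] }
])
// digest === Map { 'doc1' => 'h1b' }, which is what loadDigest returns per domain.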
@ -1,21 +0,0 @@
|
||||
import client from '@anticrm/client'
|
||||
import clientResources from '@anticrm/client-resources'
|
||||
import { Client } from '@anticrm/core'
|
||||
import { setMetadata } from '@anticrm/platform'
|
||||
import { generateToken } from '@anticrm/server-token'
|
||||
|
||||
// eslint-disable-next-line
|
||||
const WebSocket = require('ws')
|
||||
|
||||
export async function connect (
|
||||
transactorUrl: string,
|
||||
workspace: string,
|
||||
extra?: Record<string, string>
|
||||
): Promise<Client> {
|
||||
console.log('connecting to transactor...')
|
||||
const token = generateToken('anticrm@hc.engineering', workspace, extra)
|
||||
|
||||
// We need to override default factory with 'ws' one.
|
||||
setMetadata(client.metadata.ClientSocketFactory, (url) => new WebSocket(url))
|
||||
return await (await clientResources()).function.GetClient(token, transactorUrl)
|
||||
}
|
@ -441,6 +441,10 @@ class MongoReadOnlyAdapter extends TxProcessor implements DbAdapter {
|
||||
async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
|
||||
return []
|
||||
}
|
||||
|
||||
async upload (domain: Domain, docs: Doc[]): Promise<void> {}
|
||||
|
||||
async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {}
|
||||
}
|
||||
|
||||
class MongoReadOnlyTxAdapter extends MongoReadOnlyAdapter implements TxAdapter {
|
||||
|
@ -31,6 +31,7 @@ import core, {
|
||||
} from '@anticrm/core'
|
||||
import recruit from '@anticrm/model-recruit'
|
||||
import { Applicant, Candidate, Vacancy } from '@anticrm/recruit'
|
||||
import { connect } from '@anticrm/server-tool'
|
||||
import task, { calcRank, DoneState, genRanks, Kanban, State } from '@anticrm/task'
|
||||
import { deepEqual } from 'fast-equals'
|
||||
import { existsSync } from 'fs'
|
||||
@ -39,7 +40,6 @@ import mime from 'mime-types'
|
||||
import { Client } from 'minio'
|
||||
import { dirname, join } from 'path'
|
||||
import { parseStringPromise } from 'xml2js'
|
||||
import { connect } from './connect'
|
||||
import { ElasticTool } from './elastic'
|
||||
import { findOrUpdateAttached } from './utils'
|
||||
|
||||
|
@ -28,12 +28,12 @@ import {
|
||||
upgradeWorkspace
|
||||
} from '@anticrm/account'
|
||||
import { setMetadata } from '@anticrm/platform'
|
||||
import { backup, backupList, createFileBackupStorage, createMinioBackupStorage, restore } from '@anticrm/server-backup'
|
||||
import { decodeToken, generateToken } from '@anticrm/server-token'
|
||||
import toolPlugin, { prepareTools, version } from '@anticrm/server-tool'
|
||||
import { program } from 'commander'
|
||||
import { Db, MongoClient } from 'mongodb'
|
||||
import { exit } from 'process'
|
||||
import { backupWorkspace } from './backup'
|
||||
import { rebuildElastic } from './elastic'
|
||||
import { importXml } from './importer'
|
||||
import { updateCandidates } from './recruit'
|
||||
@ -186,10 +186,49 @@ program
|
||||
})
|
||||
|
||||
program
|
||||
.command('backup-workspace <workspace> <dirName>')
|
||||
.command('backup <dirName> <workspace>')
|
||||
.description('dump workspace transactions and minio resources')
|
||||
.action(async (workspace, dirName, cmd) => {
|
||||
return await backupWorkspace(transactorUrl, workspace, dirName)
|
||||
.action(async (dirName, workspace, cmd) => {
|
||||
const storage = await createFileBackupStorage(dirName)
|
||||
return await backup(transactorUrl, workspace, storage)
|
||||
})
|
||||
|
||||
program
|
||||
.command('backup-restore <dirName> <workspace> [date]')
|
||||
.description('restore workspace transactions and minio resources from a backup')
|
||||
.action(async (dirName, workspace, date, cmd) => {
|
||||
const storage = await createFileBackupStorage(dirName)
|
||||
return await restore(transactorUrl, workspace, storage, parseInt(date ?? '-1'))
|
||||
})
|
||||
|
||||
program
|
||||
.command('backup-list <dirName>')
|
||||
.description('list snapshot ids for backup')
|
||||
.action(async (dirName, cmd) => {
|
||||
const storage = await createFileBackupStorage(dirName)
|
||||
return await backupList(storage)
|
||||
})
|
||||
|
||||
program
|
||||
.command('backup-s3 <bucketName> <dirName> <workspace>')
|
||||
.description('dump workspace transactions and minio resources')
|
||||
.action(async (bucketName, dirName, workspace, cmd) => {
|
||||
const storage = await createMinioBackupStorage(minio, bucketName, dirName)
|
||||
return await backup(transactorUrl, workspace, storage)
|
||||
})
|
||||
program
|
||||
.command('backup-s3-restore <bucketName> <dirName> <workspace> [date]')
|
||||
.description('restore workspace transactions and minio resources from a backup')
|
||||
.action(async (bucketName, dirName, workspace, date, cmd) => {
|
||||
const storage = await createMinioBackupStorage(minio, bucketName, dirName)
|
||||
return await restore(transactorUrl, workspace, storage, parseInt(date ?? '-1'))
|
||||
})
|
||||
program
|
||||
.command('backup-s3-list <bucketName> <dirName>')
|
||||
.description('list snapshot ids for backup')
|
||||
.action(async (bucketName, dirName, cmd) => {
|
||||
const storage = await createMinioBackupStorage(minio, bucketName, dirName)
|
||||
return await backupList(storage)
|
||||
})
|
||||
|
||||
program
|
||||
|
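For reference, the operations these commands wire up can also be driven programmatically through the exports of @anticrm/server-backup; a minimal sketch in which the URL, workspace and directory are placeholders:

import { backup, backupList, createFileBackupStorage, restore } from '@anticrm/server-backup'

async function backupAndRestore (): Promise<void> {
  const transactorUrl = 'ws://localhost:3333' // placeholder
  const workspace = 'my-workspace' // placeholder

  const storage = await createFileBackupStorage('./backup/my-workspace') // placeholder directory

  await backup(transactorUrl, workspace, storage) // incremental: unchanged documents are skipped
  await backupList(storage) // prints the snapshot ids (dates)
  await restore(transactorUrl, workspace, storage, -1) // -1 mirrors the CLI default when no date is given
}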
@ -22,10 +22,10 @@ import recruit from '@anticrm/model-recruit'
|
||||
import { Candidate } from '@anticrm/recruit'
|
||||
import { ReconiDocument } from '@anticrm/rekoni'
|
||||
import { generateToken } from '@anticrm/server-token'
|
||||
import { connect } from '@anticrm/server-tool'
|
||||
import tags, { findTagCategory } from '@anticrm/tags'
|
||||
import { Client } from 'minio'
|
||||
import request from 'request'
|
||||
import { connect } from './connect'
|
||||
import { ElasticTool } from './elastic'
|
||||
import { findOrUpdateAttached } from './utils'
|
||||
import { readMinioData } from './workspace'
|
||||
|
@ -18,13 +18,18 @@ import {
|
||||
AnyAttribute,
|
||||
ArrOf,
|
||||
AttachedDoc,
|
||||
BlobData,
|
||||
Class,
|
||||
ClassifierKind,
|
||||
Collection,
|
||||
Doc,
|
||||
Domain,
|
||||
DOMAIN_MODEL,
|
||||
DOMAIN_BLOB,
|
||||
DOMAIN_FULLTEXT_BLOB,
|
||||
DOMAIN_MODEL,
|
||||
Enum,
|
||||
EnumOf,
|
||||
FullTextData,
|
||||
IndexKind,
|
||||
Interface,
|
||||
Mixin,
|
||||
@ -35,10 +40,7 @@ import {
|
||||
Space,
|
||||
Timestamp,
|
||||
Type,
|
||||
Version,
|
||||
BlobData,
|
||||
EnumOf,
|
||||
Enum
|
||||
Version
|
||||
} from '@anticrm/core'
|
||||
import { Hidden, Index, Model, Prop, TypeIntlString, TypeRef, TypeString, TypeTimestamp, UX } from '@anticrm/model'
|
||||
import type { IntlString } from '@anticrm/platform'
|
||||
@ -200,3 +202,8 @@ export class TBlobData extends TDoc implements BlobData {
|
||||
type!: string
|
||||
base64Data!: string
|
||||
}
|
||||
|
||||
@Model(core.class.FulltextData, core.class.Doc, DOMAIN_FULLTEXT_BLOB)
|
||||
export class TFulltextData extends TDoc implements FullTextData {
|
||||
data!: any
|
||||
}
|
||||
|
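Blob-domain documents carry their payload as base64 text, which is what lets the backup code in this commit split each blob into a JSON descriptor plus a raw tar entry. An illustrative fragment limited to the fields visible above; the values are invented:

import { BlobData } from '@anticrm/core'

const payload = Buffer.from([0x89, 0x50, 0x4e, 0x47]) // invented bytes
const blobFields: Pick<BlobData, 'type' | 'base64Data'> = {
  type: 'image/png', // invented
  base64Data: payload.toString('base64') // decoded back with Buffer.from(..., 'base64') during backup
}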
@ -23,6 +23,9 @@ import {
|
||||
TClass,
|
||||
TCollection,
|
||||
TDoc,
|
||||
TEnum,
|
||||
TEnumOf,
|
||||
TFulltextData,
|
||||
TInterface,
|
||||
TMixin,
|
||||
TObj,
|
||||
@ -36,9 +39,7 @@ import {
|
||||
TTypeNumber,
|
||||
TTypeString,
|
||||
TTypeTimestamp,
|
||||
TEnumOf,
|
||||
TVersion,
|
||||
TEnum
|
||||
TVersion
|
||||
} from './core'
|
||||
import { TAccount, TSpace } from './security'
|
||||
import { TUserStatus } from './transient'
|
||||
@ -96,6 +97,7 @@ export function createModel (builder: Builder): void {
|
||||
TPluginConfiguration,
|
||||
TUserStatus,
|
||||
TEnum,
|
||||
TBlobData
|
||||
TBlobData,
|
||||
TFulltextData
|
||||
)
|
||||
}
|
||||
|
@ -47,6 +47,9 @@ export class TTx extends TDoc implements Tx {
|
||||
objectSpace!: Ref<Space>
|
||||
}
|
||||
|
||||
@Model(core.class.TxModelUpgrade, core.class.Tx, DOMAIN_TX)
|
||||
export class TTxModelUpgrade extends TTx {}
|
||||
|
||||
@Model(core.class.TxCUD, core.class.Tx)
|
||||
export class TTxCUD<T extends Doc> extends TTx implements TxCUD<T> {
|
||||
@Index(IndexKind.Indexed)
|
||||
|
@ -62,6 +62,8 @@ export async function connect (handler: (tx: Tx) => void): Promise<ClientConnect
|
||||
digest: ''
|
||||
}),
|
||||
closeChunk: async (idx: number) => {},
|
||||
loadDocs: async (domain: Domain, docs: Ref<Doc>[]) => []
|
||||
loadDocs: async (domain: Domain, docs: Ref<Doc>[]) => [],
|
||||
upload: async (domain: Domain, docs: Doc[]) => {},
|
||||
clean: async (domain: Domain, docs: Ref<Doc>[]) => {}
|
||||
}
|
||||
}
|
||||
|
@ -20,4 +20,6 @@ export interface BackupClient {
|
||||
closeChunk: (idx: number) => Promise<void>
|
||||
|
||||
loadDocs: (domain: Domain, docs: Ref<Doc>[]) => Promise<Doc[]>
|
||||
upload: (domain: Domain, docs: Doc[]) => Promise<void>
|
||||
clean: (domain: Domain, docs: Ref<Doc>[]) => Promise<void>
|
||||
}
|
||||
|
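A hedged sketch of how a consumer such as the backup tool drives this interface: loadChunk streams (id, hash) pairs per domain, loadDocs fetches the bodies that changed, and upload/clean push documents back on restore. loadChunk is assumed to return { idx, docs, finished } as the backup code uses it:

import { BackupClient, Client, Doc, Domain, Ref } from '@anticrm/core'

async function dumpDomain (client: Client & BackupClient, domain: Domain): Promise<Doc[]> {
  const result: Doc[] = []
  let idx: number | undefined
  while (true) {
    const chunk = await client.loadChunk(domain, idx)
    idx = chunk.idx
    const ids = Object.keys(chunk.docs) as Ref<Doc>[]
    if (ids.length > 0) {
      result.push(...(await client.loadDocs(domain, ids))) // fetch full bodies only for this chunk
    }
    if (chunk.finished) {
      break
    }
  }
  if (idx !== undefined) {
    await client.closeChunk(idx) // assumed to release the server-side cursor
  }
  return result
}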
@ -255,6 +255,12 @@ export const DOMAIN_TRANSIENT = 'transient' as Domain
|
||||
*/
|
||||
export const DOMAIN_BLOB = 'blob' as Domain
|
||||
|
||||
/**
|
||||
* Special domain to access full text search blob data.
|
||||
* @public
|
||||
*/
|
||||
export const DOMAIN_FULLTEXT_BLOB = 'fulltext-blob' as Domain
|
||||
|
||||
// S P A C E
|
||||
|
||||
/**
|
||||
@ -301,3 +307,11 @@ export interface BlobData extends Doc {
|
||||
type: string
|
||||
base64Data: string // base64 encoded data
|
||||
}
|
||||
|
||||
/**
|
||||
* Full text search blob data
|
||||
* @public
|
||||
*/
|
||||
export interface FullTextData extends Doc {
|
||||
data: any
|
||||
}
|
||||
|
@ -132,6 +132,14 @@ class ClientImpl implements Client, BackupClient {
|
||||
async loadDocs (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
|
||||
return await this.conn.loadDocs(domain, docs)
|
||||
}
|
||||
|
||||
async upload (domain: Domain, docs: Doc[]): Promise<void> {
|
||||
return await this.conn.upload(domain, docs)
|
||||
}
|
||||
|
||||
async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {
|
||||
return await this.conn.clean(domain, docs)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@ -163,6 +171,7 @@ export async function createClient (
|
||||
{ objectSpace: core.space.Model },
|
||||
{ sort: { _id: SortingOrder.Ascending } }
|
||||
)
|
||||
console.log('find model', atxes.length)
|
||||
|
||||
let systemTx: Tx[] = []
|
||||
const userTx: Tx[] = []
|
||||
|
@ -26,6 +26,7 @@ import type {
|
||||
Doc,
|
||||
Enum,
|
||||
EnumOf,
|
||||
FullTextData,
|
||||
Interface,
|
||||
Obj,
|
||||
PluginConfiguration,
|
||||
@ -44,6 +45,7 @@ import type {
|
||||
TxCreateDoc,
|
||||
TxCUD,
|
||||
TxMixin,
|
||||
TxModelUpgrade,
|
||||
TxPutBag,
|
||||
TxRemoveDoc,
|
||||
TxUpdateDoc
|
||||
@ -64,6 +66,7 @@ export default plugin(coreId, {
|
||||
Interface: '' as Ref<Class<Interface<Doc>>>,
|
||||
Attribute: '' as Ref<Class<AnyAttribute>>,
|
||||
Tx: '' as Ref<Class<Tx>>,
|
||||
TxModelUpgrade: '' as Ref<Class<TxModelUpgrade>>,
|
||||
TxBulkWrite: '' as Ref<Class<TxBulkWrite>>,
|
||||
TxCUD: '' as Ref<Class<TxCUD<Doc>>>,
|
||||
TxCreateDoc: '' as Ref<Class<TxCreateDoc<Doc>>>,
|
||||
@ -91,7 +94,8 @@ export default plugin(coreId, {
|
||||
Version: '' as Ref<Class<Version>>,
|
||||
PluginConfiguration: '' as Ref<Class<PluginConfiguration>>,
|
||||
UserStatus: '' as Ref<Class<UserStatus>>,
|
||||
BlobData: '' as Ref<Class<BlobData>>
|
||||
BlobData: '' as Ref<Class<BlobData>>,
|
||||
FulltextData: '' as Ref<Class<FullTextData>>
|
||||
},
|
||||
space: {
|
||||
Tx: '' as Ref<Space>,
|
||||
|
@ -42,7 +42,16 @@ export interface StorageIterator {
|
||||
export interface LowLevelStorage {
|
||||
// Low level streaming API to retrieve information
|
||||
find: (domain: Domain) => StorageIterator
|
||||
|
||||
// Load passed documents from domain
|
||||
load: (domain: Domain, docs: Ref<Doc>[]) => Promise<Doc[]>
|
||||
|
||||
// Upload new versions of documents
|
||||
// docs - new/updated version of documents.
|
||||
upload: (domain: Domain, docs: Doc[]) => Promise<void>
|
||||
|
||||
// Remove a list of documents.
|
||||
clean: (domain: Domain, docs: Ref<Doc>[]) => Promise<void>
|
||||
}
|
||||
/**
|
||||
* @public
|
||||
|
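A minimal in-memory sketch of the three methods added here (find and StorageIterator are left out because their shape is not shown in this hunk):

import { Doc, Domain, Ref } from '@anticrm/core'

class MemoryDocStore {
  private readonly domains = new Map<Domain, Map<Ref<Doc>, Doc>>()

  private docs (domain: Domain): Map<Ref<Doc>, Doc> {
    let d = this.domains.get(domain)
    if (d === undefined) {
      d = new Map()
      this.domains.set(domain, d)
    }
    return d
  }

  async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
    const stored = this.docs(domain)
    return docs.map((id) => stored.get(id)).filter((d): d is Doc => d !== undefined)
  }

  async upload (domain: Domain, docs: Doc[]): Promise<void> {
    const stored = this.docs(domain)
    for (const d of docs) {
      stored.set(d._id, d) // new or updated version replaces the old one
    }
  }

  async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {
    const stored = this.docs(domain)
    for (const id of docs) {
      stored.delete(id)
    }
  }
}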
@ -28,6 +28,12 @@ export interface Tx extends Doc {
|
||||
objectSpace: Ref<Space> // space where transaction will operate
|
||||
}
|
||||
|
||||
/**
|
||||
* Event sent by the server during the model upgrade procedure.
|
||||
* @public
|
||||
*/
|
||||
export interface TxModelUpgrade extends Tx {}
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
|
@ -22,7 +22,9 @@ export async function connect (title: string): Promise<Client | undefined> {
|
||||
}
|
||||
|
||||
const getClient = await getResource(client.function.GetClient)
|
||||
const instance = await getClient(token, endpoint)
|
||||
const instance = await getClient(token, endpoint, () => {
|
||||
location.reload()
|
||||
})
|
||||
console.log('logging in as', email)
|
||||
|
||||
const me = await instance.findOne(contact.class.EmployeeAccount, { email })
|
||||
|
@ -75,6 +75,8 @@ export async function connect (handler: (tx: Tx) => void): Promise<Client & Back
|
||||
digest: ''
|
||||
}),
|
||||
closeChunk: async (idx: number) => {},
|
||||
loadDocs: async (domain: Domain, docs: Ref<Doc>[]) => []
|
||||
loadDocs: async (domain: Domain, docs: Ref<Doc>[]) => [],
|
||||
upload: async (domain: Domain, docs: Doc[]) => {},
|
||||
clean: async (domain: Domain, docs: Ref<Doc>[]) => {}
|
||||
}
|
||||
}
|
||||
|
@ -29,6 +29,7 @@ import type {
|
||||
TxHander,
|
||||
TxResult
|
||||
} from '@anticrm/core'
|
||||
import core from '@anticrm/core'
|
||||
import { getMetadata, PlatformError, readResponse, ReqId, serialize } from '@anticrm/platform'
|
||||
|
||||
class DeferredPromise {
|
||||
@ -49,7 +50,11 @@ class Connection implements ClientConnection {
|
||||
private lastId = 0
|
||||
private readonly interval: number
|
||||
|
||||
constructor (private readonly url: string, private readonly handler: TxHander) {
|
||||
constructor (
|
||||
private readonly url: string,
|
||||
private readonly handler: TxHander,
|
||||
private readonly onUpgrade?: () => void
|
||||
) {
|
||||
console.log('connection created')
|
||||
this.interval = setInterval(() => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
@ -62,37 +67,69 @@ class Connection implements ClientConnection {
|
||||
this.websocket?.close()
|
||||
}
|
||||
|
||||
private openConnection (): Promise<ClientSocket> {
|
||||
// Use defined factory or browser default one.
|
||||
const clientSocketFactory =
|
||||
getMetadata(client.metadata.ClientSocketFactory) ?? ((url: string) => new WebSocket(url) as ClientSocket)
|
||||
private async waitOpenConnection (): Promise<ClientSocket> {
|
||||
while (true) {
|
||||
try {
|
||||
return await this.openConnection()
|
||||
} catch (err: any) {
|
||||
console.log('failed to connect')
|
||||
|
||||
const websocket = clientSocketFactory(this.url)
|
||||
websocket.onmessage = (event: MessageEvent) => {
|
||||
const resp = readResponse(event.data)
|
||||
if (resp.id !== undefined) {
|
||||
const promise = this.requests.get(resp.id)
|
||||
if (promise === undefined) {
|
||||
throw new Error(`unknown response id: ${resp.id}`)
|
||||
}
|
||||
this.requests.delete(resp.id)
|
||||
if (resp.error !== undefined) {
|
||||
promise.reject(new PlatformError(resp.error))
|
||||
} else {
|
||||
promise.resolve(resp.result)
|
||||
}
|
||||
} else {
|
||||
this.handler(resp.result as Tx)
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(() => {
|
||||
resolve(null)
|
||||
}, 1000)
|
||||
})
|
||||
}
|
||||
}
|
||||
websocket.onclose = () => {
|
||||
console.log('client websocket closed')
|
||||
// clearInterval(interval)
|
||||
this.websocket = null
|
||||
}
|
||||
}
|
||||
|
||||
private openConnection (): Promise<ClientSocket> {
|
||||
return new Promise((resolve, reject) => {
|
||||
// Use defined factory or browser default one.
|
||||
const clientSocketFactory =
|
||||
getMetadata(client.metadata.ClientSocketFactory) ?? ((url: string) => new WebSocket(url) as ClientSocket)
|
||||
|
||||
const websocket = clientSocketFactory(this.url)
|
||||
websocket.onmessage = (event: MessageEvent) => {
|
||||
const resp = readResponse(event.data)
|
||||
if (resp.id === -1 && resp.result === 'hello') {
|
||||
resolve(websocket)
|
||||
return
|
||||
}
|
||||
if (resp.id !== undefined) {
|
||||
const promise = this.requests.get(resp.id)
|
||||
if (promise === undefined) {
|
||||
throw new Error(`unknown response id: ${resp.id}`)
|
||||
}
|
||||
this.requests.delete(resp.id)
|
||||
if (resp.error !== undefined) {
|
||||
promise.reject(new PlatformError(resp.error))
|
||||
} else {
|
||||
promise.resolve(resp.result)
|
||||
}
|
||||
} else {
|
||||
const tx = resp.result as Tx
|
||||
if (tx._class === core.class.TxModelUpgrade) {
|
||||
this.onUpgrade?.()
|
||||
}
|
||||
this.handler(tx)
|
||||
}
|
||||
}
|
||||
websocket.onclose = () => {
|
||||
console.log('client websocket closed')
|
||||
// clearInterval(interval)
|
||||
this.websocket = null
|
||||
reject(new Error('websocket error'))
|
||||
}
|
||||
websocket.onopen = () => {
|
||||
resolve(websocket)
|
||||
console.log('connection opened...')
|
||||
websocket.send(
|
||||
serialize({
|
||||
method: 'hello',
|
||||
params: [],
|
||||
id: -1
|
||||
})
|
||||
)
|
||||
}
|
||||
websocket.onerror = (event: any) => {
|
||||
console.log('client websocket error', event)
|
||||
@ -103,7 +140,8 @@ class Connection implements ClientConnection {
|
||||
|
||||
private async sendRequest (method: string, ...params: any[]): Promise<any> {
|
||||
if (this.websocket === null) {
|
||||
this.websocket = await this.openConnection()
|
||||
console.log('open connection from', method, params)
|
||||
this.websocket = await this.waitOpenConnection()
|
||||
}
|
||||
const id = this.lastId++
|
||||
this.websocket.send(
|
||||
@ -141,11 +179,19 @@ class Connection implements ClientConnection {
|
||||
loadDocs (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
|
||||
return this.sendRequest('loadDocs', domain, docs)
|
||||
}
|
||||
|
||||
upload (domain: Domain, docs: Doc[]): Promise<void> {
|
||||
return this.sendRequest('upload', domain, docs)
|
||||
}
|
||||
|
||||
clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {
|
||||
return this.sendRequest('clean', domain, docs)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
export async function connect (url: string, handler: TxHander): Promise<ClientConnection> {
|
||||
return new Connection(url, handler)
|
||||
export async function connect (url: string, handler: TxHander, onUpgrade?: () => void): Promise<ClientConnection> {
|
||||
return new Connection(url, handler, onUpgrade)
|
||||
}
|
||||
|
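The connection above picks its socket implementation from client.metadata.ClientSocketFactory, so Node-based tools (which have no browser WebSocket) install one before requesting a client, just as the removed dev-tool connect helper did:

import client from '@anticrm/client'
import { setMetadata } from '@anticrm/platform'

// eslint-disable-next-line @typescript-eslint/no-var-requires
const WebSocket = require('ws')

// Use the 'ws' implementation instead of the browser WebSocket.
setMetadata(client.metadata.ClientSocketFactory, (url: string) => new WebSocket(url))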
@ -32,7 +32,7 @@ export default async () => {
|
||||
|
||||
return {
|
||||
function: {
|
||||
GetClient: async (token: string, endpoint: string): Promise<Client> => {
|
||||
GetClient: async (token: string, endpoint: string, onUpgrade?: () => void): Promise<Client> => {
|
||||
if (token !== _token && client !== undefined) {
|
||||
await client.close()
|
||||
client = undefined
|
||||
@ -43,10 +43,11 @@ export default async () => {
|
||||
(handler: TxHander) => {
|
||||
const url = new URL(`/${token}`, endpoint)
|
||||
console.log('connecting to', url.href)
|
||||
return connect(url.href, handler)
|
||||
return connect(url.href, handler, onUpgrade)
|
||||
},
|
||||
filterModel ? getPlugins() : undefined
|
||||
)
|
||||
console.log('client connection created')
|
||||
_token = token
|
||||
|
||||
// Check if we had dev hook for client.
|
||||
|
@ -52,7 +52,7 @@ export interface ClientSocket {
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
export type ClientFactory = (token: string, endpoint: string) => Promise<Client>
|
||||
export type ClientFactory = (token: string, endpoint: string, onUpgrade?: () => void) => Promise<Client>
|
||||
|
||||
export default plugin(clientId, {
|
||||
metadata: {
|
||||
|
@ -16,7 +16,7 @@
|
||||
* path segment in the "$schema" field for all your Rush config files. This will ensure
|
||||
* correct error-underlining and tab-completion for editors such as VS Code.
|
||||
*/
|
||||
"rushVersion": "5.66.2",
|
||||
"rushVersion": "5.71.0",
|
||||
|
||||
/**
|
||||
* The next field selects which package manager should be installed and determines its version.
|
||||
@ -1302,6 +1302,11 @@
|
||||
"packageName": "@anticrm/middleware",
|
||||
"projectFolder": "server/middleware",
|
||||
"shouldPublish": true
|
||||
},
|
||||
{
|
||||
"packageName": "@anticrm/server-backup",
|
||||
"projectFolder": "server/backup",
|
||||
"shouldPublish": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@ -30,7 +30,7 @@
|
||||
"dependencies": {
|
||||
"mongodb": "^4.1.1",
|
||||
"@anticrm/platform": "~0.6.6",
|
||||
"minio": "~7.0.26",
|
||||
"minio": "^7.0.26",
|
||||
"@anticrm/core": "~0.6.16",
|
||||
"@anticrm/contact": "~0.6.5",
|
||||
"@anticrm/client-resources": "~0.6.4",
|
||||
|
@ -394,7 +394,7 @@ export async function assignWorkspace (db: Db, email: string, workspace: string)
|
||||
}
|
||||
|
||||
async function createEmployeeAccount (account: Account, workspace: string): Promise<void> {
|
||||
const connection = await connect(getTransactor(), workspace, false, account.email)
|
||||
const connection = await connect(getTransactor(), workspace, account.email)
|
||||
try {
|
||||
const ops = new TxOperations(connection, core.account.System)
|
||||
|
||||
@ -472,7 +472,7 @@ export async function changeName (db: Db, token: string, first: string, last: st
|
||||
}
|
||||
|
||||
async function updateEmployeeAccount (account: Account, workspace: string): Promise<void> {
|
||||
const connection = await connect(getTransactor(), workspace, false, account.email)
|
||||
const connection = await connect(getTransactor(), workspace, account.email)
|
||||
try {
|
||||
const ops = new TxOperations(connection, core.account.System)
|
||||
|
||||
|
server/backup/.eslintrc.js (new file, 7 lines)
@ -0,0 +1,7 @@
|
||||
module.exports = {
|
||||
extends: ['./node_modules/@anticrm/platform-rig/profiles/default/config/eslint.config.json'],
|
||||
parserOptions: {
|
||||
tsconfigRootDir: __dirname,
|
||||
project: './tsconfig.json'
|
||||
}
|
||||
}
|
server/backup/.npmignore (new file, 4 lines)
@ -0,0 +1,4 @@
|
||||
*
|
||||
!/lib/**
|
||||
!CHANGELOG.md
|
||||
/lib/**/__tests__/
|
server/backup/config/rig.json (new file, 18 lines)
@ -0,0 +1,18 @@
|
||||
// The "rig.json" file directs tools to look for their config files in an external package.
|
||||
// Documentation for this system: https://www.npmjs.com/package/@rushstack/rig-package
|
||||
{
|
||||
"$schema": "https://developer.microsoft.com/json-schemas/rig-package/rig.schema.json",
|
||||
|
||||
/**
|
||||
* (Required) The name of the rig package to inherit from.
|
||||
* It should be an NPM package name with the "-rig" suffix.
|
||||
*/
|
||||
"rigPackageName": "@anticrm/platform-rig"
|
||||
|
||||
/**
|
||||
* (Optional) Selects a config profile from the rig package. The name must consist of
|
||||
* lowercase alphanumeric words separated by hyphens, for example "sample-profile".
|
||||
* If omitted, then the "default" profile will be used.
|
||||
*/
|
||||
// "rigProfile": "your-profile-name"
|
||||
}
|
server/backup/package.json (new file, 42 lines)
@ -0,0 +1,42 @@
|
||||
{
|
||||
"name": "@anticrm/server-backup",
|
||||
"version": "0.6.0",
|
||||
"main": "lib/index.js",
|
||||
"author": "Anticrm Platform Contributors",
|
||||
"license": "EPL-2.0",
|
||||
"scripts": {
|
||||
"build": "heft build",
|
||||
"build:watch": "tsc",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"lint": "eslint src",
|
||||
"format": "prettier --write src && eslint --fix src"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@anticrm/platform-rig": "~0.6.0",
|
||||
"@types/heft-jest": "^1.0.2",
|
||||
"@typescript-eslint/eslint-plugin": "^5.21.0",
|
||||
"eslint-plugin-import": "^2.25.3",
|
||||
"eslint-plugin-promise": "^5.1.1",
|
||||
"eslint-plugin-node": "^11.1.0",
|
||||
"eslint": "^7.32.0",
|
||||
"@typescript-eslint/parser": "^5.4.0",
|
||||
"eslint-config-standard-with-typescript": "^21.0.1",
|
||||
"prettier": "^2.4.1",
|
||||
"@rushstack/heft": "^0.44.13",
|
||||
"typescript": "^4.3.5",
|
||||
"@types/tar-stream": "^2.2.2",
|
||||
"@types/node": "~16.11.12",
|
||||
"@types/minio": "~7.0.11"
|
||||
},
|
||||
"dependencies": {
|
||||
"@anticrm/platform": "~0.6.6",
|
||||
"@anticrm/core": "~0.6.16",
|
||||
"@anticrm/contact": "~0.6.5",
|
||||
"@anticrm/client-resources": "~0.6.4",
|
||||
"@anticrm/client": "~0.6.2",
|
||||
"@anticrm/model": "~0.6.0",
|
||||
"tar-stream": "^2.2.0",
|
||||
"@anticrm/server-tool": "~0.6.0",
|
||||
"minio": "^7.0.26"
|
||||
}
|
||||
}
|
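The index.ts below builds on a BackupStorage abstraction from ./storage, which this diff does not include. Judging only from the calls made on it (exists, loadFile, writeFile and write), its contract is roughly the following; this is an inferred sketch, not the actual declaration:

import { Writable } from 'stream'

interface BackupStorageSketch {
  exists: (name: string) => Promise<boolean>
  loadFile: (name: string) => Promise<Buffer> // read a whole (gzipped) file
  writeFile: (name: string, data: Buffer) => Promise<void>
  write: (name: string) => Promise<Writable> // streaming sink for the tar.gz data blobs
}

createFileBackupStorage and createMinioBackupStorage, used by the tool commands above, are assumed to return implementations of this contract backed by the file system and by Minio respectively.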
server/backup/src/index.ts (new file, 483 lines)
@ -0,0 +1,483 @@
|
||||
//
|
||||
// Copyright © 2020, 2021 Anticrm Platform Contributors.
|
||||
// Copyright © 2021 Hardcore Engineering Inc.
|
||||
//
|
||||
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License. You may
|
||||
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
//
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
import core, {
|
||||
BackupClient,
|
||||
BlobData,
|
||||
Client as CoreClient,
|
||||
Doc,
|
||||
Domain,
|
||||
DOMAIN_MODEL,
|
||||
DOMAIN_TRANSIENT,
|
||||
Ref
|
||||
} from '@anticrm/core'
|
||||
import { createGzip } from 'node:zlib'
|
||||
import { join } from 'path'
|
||||
import { extract, Pack, pack } from 'tar-stream'
|
||||
import { createGunzip, gunzipSync, gzipSync } from 'zlib'
|
||||
import { connect } from '@anticrm/server-tool'
|
||||
import { BackupStorage } from './storage'
|
||||
export * from './storage'
|
||||
|
||||
const dataBlobSize = 50 * 1024 * 1024
|
||||
const dataUploadSize = 2 * 1024 * 1024
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
export interface Snapshot {
|
||||
added: Record<Ref<Doc>, string>
|
||||
updated: Record<Ref<Doc>, string>
|
||||
removed: Ref<Doc>[]
|
||||
}
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
export interface DomainData {
|
||||
snapshot?: string
|
||||
storage?: string[]
|
||||
|
||||
// Some statistics
|
||||
added: number
|
||||
updated: number
|
||||
removed: number
|
||||
}
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
export interface BackupSnapshot {
|
||||
// _id => hash of added items.
|
||||
domains: Record<Domain, DomainData>
|
||||
date: number
|
||||
}
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
export interface BackupInfo {
|
||||
workspace: string
|
||||
version: string
|
||||
snapshots: BackupSnapshot[]
|
||||
}
|
||||
|
||||
async function loadDigest (
|
||||
storage: BackupStorage,
|
||||
snapshots: BackupSnapshot[],
|
||||
domain: Domain,
|
||||
date?: number
|
||||
): Promise<Map<Ref<Doc>, string>> {
|
||||
const result = new Map<Ref<Doc>, string>()
|
||||
for (const s of snapshots) {
|
||||
const d = s.domains[domain]
|
||||
if (d?.snapshot !== undefined) {
|
||||
const dChanges: Snapshot = JSON.parse(gunzipSync(await storage.loadFile(d.snapshot)).toString())
|
||||
for (const [k, v] of Object.entries(dChanges.added)) {
|
||||
result.set(k as Ref<Doc>, v)
|
||||
}
|
||||
for (const [k, v] of Object.entries(dChanges.updated)) {
|
||||
result.set(k as Ref<Doc>, v)
|
||||
}
|
||||
for (const d of dChanges.removed) {
|
||||
result.delete(d)
|
||||
}
|
||||
}
|
||||
// Stop once the requested snapshot date (if provided) is reached
|
||||
if (date !== undefined && date === s.date) {
|
||||
break
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
export async function backup (transactorUrl: string, dbName: string, storage: BackupStorage): Promise<void> {
|
||||
const connection = (await connect(transactorUrl, dbName, undefined, {
|
||||
mode: 'backup'
|
||||
})) as unknown as CoreClient & BackupClient
|
||||
try {
|
||||
const domains = connection
|
||||
.getHierarchy()
|
||||
.domains()
|
||||
.filter((it) => it !== DOMAIN_TRANSIENT && it !== DOMAIN_MODEL)
|
||||
|
||||
let backupInfo: BackupInfo = {
|
||||
workspace: dbName,
|
||||
version: '0.6',
|
||||
snapshots: []
|
||||
}
|
||||
const infoFile = 'backup.json.gz'
|
||||
|
||||
if (await storage.exists(infoFile)) {
|
||||
backupInfo = JSON.parse(gunzipSync(await storage.loadFile(infoFile)).toString())
|
||||
}
|
||||
|
||||
backupInfo.workspace = dbName
|
||||
|
||||
const snapshot: BackupSnapshot = {
|
||||
date: Date.now(),
|
||||
domains: {}
|
||||
}
|
||||
|
||||
backupInfo.snapshots.push(snapshot)
|
||||
let backupIndex = `${backupInfo.snapshots.length}`
|
||||
while (backupIndex.length < 6) {
|
||||
backupIndex = '0' + backupIndex
|
||||
}
|
||||
|
||||
for (const c of domains) {
|
||||
console.log('dumping domain...', c)
|
||||
|
||||
const changes: Snapshot = {
|
||||
added: {},
|
||||
updated: {},
|
||||
removed: []
|
||||
}
|
||||
let changed = 0
|
||||
let stIndex = 0
|
||||
const domainInfo: Required<DomainData> = {
|
||||
snapshot: join(backupIndex, `${c}-${snapshot.date}.json.gz`),
|
||||
storage: [],
|
||||
added: 0,
|
||||
updated: 0,
|
||||
removed: 0
|
||||
}
|
||||
|
||||
// Cumulative digest
|
||||
const digest = await loadDigest(storage, backupInfo.snapshots, c)
|
||||
|
||||
let idx: number | undefined
|
||||
|
||||
let _pack: Pack | undefined
|
||||
let addedDocuments = 0
|
||||
|
||||
// update digest tar
|
||||
while (true) {
|
||||
const it = await connection.loadChunk(c, idx)
|
||||
idx = it.idx
|
||||
|
||||
const needRetrieve: Ref<Doc>[] = []
|
||||
|
||||
for (const [k, v] of Object.entries(it.docs)) {
|
||||
const kHash = digest.get(k as Ref<Doc>)
|
||||
if (kHash !== undefined) {
|
||||
digest.delete(k as Ref<Doc>)
|
||||
if (kHash !== v) {
|
||||
changes.updated[k as Ref<Doc>] = v
|
||||
needRetrieve.push(k as Ref<Doc>)
|
||||
changed++
|
||||
}
|
||||
} else {
|
||||
changes.added[k as Ref<Doc>] = v
|
||||
needRetrieve.push(k as Ref<Doc>)
|
||||
changed++
|
||||
}
|
||||
}
|
||||
if (needRetrieve.length > 0) {
|
||||
const docs = await connection.loadDocs(c, needRetrieve)
|
||||
|
||||
// Chunk data into small pieces
|
||||
if (addedDocuments > dataBlobSize && _pack !== undefined) {
|
||||
_pack.finalize()
|
||||
_pack = undefined
|
||||
addedDocuments = 0
|
||||
}
|
||||
if (_pack === undefined) {
|
||||
_pack = pack()
|
||||
stIndex++
|
||||
const storageFile = join(backupIndex, `${c}-data-${snapshot.date}-${stIndex}.tar.gz`)
|
||||
console.log('storing from domain', c, storageFile)
|
||||
domainInfo.storage.push(storageFile)
|
||||
const dataStream = await storage.write(storageFile)
|
||||
const storageZip = createGzip()
|
||||
|
||||
_pack.pipe(storageZip)
|
||||
storageZip.pipe(dataStream)
|
||||
}
|
||||
|
||||
for (const d of docs) {
|
||||
if (d._class === core.class.BlobData) {
|
||||
const blob = d as BlobData
|
||||
const data = Buffer.from(blob.base64Data, 'base64')
|
||||
blob.base64Data = ''
|
||||
const descrJson = JSON.stringify(d)
|
||||
addedDocuments += descrJson.length
|
||||
addedDocuments += data.length
|
||||
_pack.entry({ name: d._id + '.json' }, descrJson, function (err) {
|
||||
if (err != null) throw err
|
||||
})
|
||||
_pack.entry({ name: d._id }, data, function (err) {
|
||||
if (err != null) throw err
|
||||
})
|
||||
} else {
|
||||
const data = JSON.stringify(d)
|
||||
addedDocuments += data.length
|
||||
_pack.entry({ name: d._id + '.json' }, data, function (err) {
|
||||
if (err != null) throw err
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (it.finished) {
|
||||
break
|
||||
}
|
||||
}
|
||||
changes.removed = Array.from(digest.keys())
|
||||
if (changes.removed.length > 0) {
|
||||
changed++
|
||||
}
|
||||
|
||||
if (changed > 0) {
|
||||
snapshot.domains[c] = domainInfo
|
||||
domainInfo.added = Object.keys(changes.added).length
|
||||
domainInfo.updated = Object.keys(changes.updated).length
|
||||
domainInfo.removed = changes.removed.length
|
||||
await storage.writeFile(domainInfo.snapshot, gzipSync(JSON.stringify(changes)))
|
||||
_pack?.finalize()
|
||||
}
|
||||
}
|
||||
|
||||
await storage.writeFile(infoFile, gzipSync(JSON.stringify(backupInfo, undefined, 2)))
|
||||
} finally {
|
||||
await connection.close()
|
||||
}
|
||||
}

/**
 * @public
 */
export async function backupList (storage: BackupStorage): Promise<void> {
  const infoFile = 'backup.json.gz'

  if (!(await storage.exists(infoFile))) {
    throw new Error(`${infoFile} should be present to restore`)
  }
  const backupInfo: BackupInfo = JSON.parse(gunzipSync(await storage.loadFile(infoFile)).toString())
  console.log('workspace:', backupInfo.workspace ?? '', backupInfo.version)
  for (const s of backupInfo.snapshots) {
    console.log('snapshot: id:', s.date, ' date:', new Date(s.date))
  }
}

/**
 * @public
 * Restore state of DB to specified point.
 */
export async function restore (
  transactorUrl: string,
  dbName: string,
  storage: BackupStorage,
  date: number
): Promise<void> {
  const infoFile = 'backup.json.gz'

  if (!(await storage.exists(infoFile))) {
    throw new Error(`${infoFile} should be present to restore`)
  }
  const backupInfo: BackupInfo = JSON.parse(gunzipSync(await storage.loadFile(infoFile)).toString())
  let snapshots = backupInfo.snapshots
  if (date !== -1) {
    const bk = backupInfo.snapshots.findIndex((it) => it.date === date)
    if (bk === -1) {
      throw new Error(`Could not restore to ${date}: snapshot is missing from ${infoFile}`)
    }
    snapshots = backupInfo.snapshots.slice(0, bk + 1)
  } else {
    date = snapshots[snapshots.length - 1].date
  }
  console.log('restore to ', date, new Date(date))
  const rsnapshots = Array.from(snapshots).reverse()

  // Collect all possible domains
  const domains = new Set<Domain>()
  for (const s of snapshots) {
    Object.keys(s.domains).forEach((it) => domains.add(it as Domain))
  }

  const connection = (await connect(transactorUrl, dbName, undefined, {
    mode: 'backup',
    model: 'upgrade'
  })) as unknown as CoreClient & BackupClient
  try {
    for (const c of domains) {
      console.log('loading server changeset for', c)
      const changeset = await loadDigest(storage, snapshots, c, date)
      // We need to load full changeset from server
      const serverChangeset = new Map<Ref<Doc>, string>()

      let idx: number | undefined
      let loaded = 0
      let last = 0
      while (true) {
        const it = await connection.loadChunk(c, idx)
        idx = it.idx

        for (const [_id, hash] of Object.entries(it.docs)) {
          serverChangeset.set(_id as Ref<Doc>, hash)
          loaded++
        }
        const mr = Math.round(loaded / 10000)
        if (mr !== last) {
          last = mr
          console.log(' loaded', loaded)
        }
        if (it.finished) {
          break
        }
      }
      console.log(' loaded', loaded)
      console.log('\tcompare documents', changeset.size, serverChangeset.size)

      // Let's find the difference
      const docsToAdd = new Map(
        Array.from(changeset.entries()).filter(
          ([it]) =>
            !serverChangeset.has(it) || (serverChangeset.has(it) && serverChangeset.get(it) !== changeset.get(it))
        )
      )
      const docsToRemove = Array.from(serverChangeset.keys()).filter((it) => !changeset.has(it))

      const docs: Doc[] = []
      const blobs = new Map<string, { doc: Doc | undefined, buffer: Buffer | undefined }>()
      let sendSize = 0
      let totalSend = 0
      async function sendChunk (doc: Doc | undefined, len: number): Promise<void> {
        if (doc !== undefined) {
          docsToAdd.delete(doc._id)
          docs.push(doc)
        }
        sendSize = sendSize + len
        if (sendSize > dataUploadSize || (doc === undefined && docs.length > 0)) {
          console.log('upload', docs.length, `send: ${totalSend} from ${docsToAdd.size + totalSend}`, 'size:', sendSize)
          totalSend += docs.length
          await connection.upload(c, docs)
          docs.length = 0
          sendSize = 0
        }
      }
      let processed = 0

      for (const s of rsnapshots) {
        const d = s.domains[c]

        if (d !== undefined && docsToAdd.size > 0) {
          const sDigest = await loadDigest(storage, [s], c)
          const requiredDocs = new Map(Array.from(sDigest.entries()).filter(([it]) => docsToAdd.has(it)))
          if (requiredDocs.size > 0) {
            console.log('updating', c, requiredDocs.size)
            // We have required documents here.
            for (const sf of d.storage ?? []) {
              if (docsToAdd.size === 0) {
                break
              }
              console.log('processing', sf, processed)

              const readStream = await storage.load(sf)
              const ex = extract()

              ex.on('entry', (headers, stream, next) => {
                const name = headers.name ?? ''
                processed++
                // We found blob data
                if (requiredDocs.has(name as Ref<Doc>)) {
                  const chunks: Buffer[] = []
                  stream.on('data', (chunk) => {
                    chunks.push(chunk)
                  })
                  stream.on('end', () => {
                    const bf = Buffer.concat(chunks)
                    const d = blobs.get(name)
                    if (d === undefined) {
                      blobs.set(name, { doc: undefined, buffer: bf })
                      next()
                    } else {
                      const d = blobs.get(name)
                      blobs.delete(name)
                      const doc = d?.doc as BlobData
                      doc.base64Data = bf.toString('base64') ?? ''
                      sendChunk(doc, bf.length).finally(() => {
                        requiredDocs.delete(doc._id)
                        next()
                      })
                    }
                  })
                } else if (name.endsWith('.json') && requiredDocs.has(name.substring(0, name.length - 5) as Ref<Doc>)) {
                  const chunks: Buffer[] = []
                  const bname = name.substring(0, name.length - 5)
                  stream.on('data', (chunk) => {
                    chunks.push(chunk)
                  })
                  stream.on('end', () => {
                    const bf = Buffer.concat(chunks)
                    const doc = JSON.parse(bf.toString()) as Doc
                    if (doc._class === core.class.BlobData) {
                      const d = blobs.get(bname)
                      if (d === undefined) {
                        blobs.set(bname, { doc, buffer: undefined })
                        next()
                      } else {
                        const d = blobs.get(bname)
                        blobs.delete(bname)
                        ;(doc as BlobData).base64Data = d?.buffer?.toString('base64') ?? ''
                        sendChunk(doc, bf.length).finally(() => {
                          requiredDocs.delete(doc._id)
                          next()
                        })
                      }
                    } else {
                      sendChunk(doc, bf.length).finally(() => {
                        requiredDocs.delete(doc._id)
                        next()
                      })
                    }
                  })
                } else {
                  next()
                }
                stream.resume() // auto-drain the stream
              })

              const endPromise = new Promise((resolve) => {
                ex.on('finish', () => {
                  resolve(null)
                })
              })
              const unzip = createGunzip()
              readStream.pipe(unzip)
              unzip.pipe(ex)

              await endPromise
            }
          } else {
            console.log('domain had no changes', c)
          }
        }
      }

      await sendChunk(undefined, 0)
      if (docsToRemove.length > 0) {
        console.log('cleanup', docsToRemove.length)
        await connection.clean(c, docsToRemove)
      }
    }
  } finally {
    await connection.close()
  }
}
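
Usage note (not part of this change): a minimal sketch of how backup(), backupList() and restore() above could be driven end to end. The relative import paths, the transactor URL 'ws://localhost:3333' and the workspace name 'my-workspace' are placeholders; createFileBackupStorage comes from the storage module added later in this commit.

// Hypothetical driver for the backup API shown above; all concrete values are placeholders.
import { backup, backupList, restore } from './backup'
import { createFileBackupStorage } from './storage'

async function runBackupCycle (): Promise<void> {
  // A directory-backed BackupStorage; the directory is created if missing.
  const storage = await createFileBackupStorage('./backups/my-workspace')
  // Take an incremental snapshot of all non-transient, non-model domains.
  await backup('ws://localhost:3333', 'my-workspace', storage)
  // Print the snapshots recorded in backup.json.gz.
  await backupList(storage)
  // Restore the most recent snapshot (date === -1 selects the latest one).
  await restore('ws://localhost:3333', 'my-workspace', storage, -1)
}

runBackupCycle().catch((err) => console.error(err))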

server/backup/src/storage.ts (new file, 121 lines)
@ -0,0 +1,121 @@
import { createReadStream, createWriteStream, existsSync } from 'fs'
import { mkdir, readFile, writeFile } from 'fs/promises'
import { Client as MinioClient } from 'minio'
import { dirname, join } from 'path'
import { PassThrough, Readable, Writable } from 'stream'

/**
 * @public
 */
export interface BackupStorage {
  loadFile: (name: string) => Promise<Buffer>
  load: (name: string) => Promise<Readable>
  write: (name: string) => Promise<Writable>
  writeFile: (name: string, data: string | Buffer) => Promise<void>
  exists: (name: string) => Promise<boolean>
}

class FileStorage implements BackupStorage {
  constructor (readonly root: string) {}
  async loadFile (name: string): Promise<Buffer> {
    return await readFile(join(this.root, name))
  }

  async write (name: string): Promise<Writable> {
    const fileName = join(this.root, name)
    const dir = dirname(fileName)
    if (!existsSync(dir)) {
      await mkdir(dir, { recursive: true })
    }

    return createWriteStream(join(this.root, name))
  }

  async load (name: string): Promise<Readable> {
    return createReadStream(join(this.root, name))
  }

  async exists (name: string): Promise<boolean> {
    return existsSync(join(this.root, name))
  }

  async writeFile (name: string, data: string | Buffer): Promise<void> {
    const fileName = join(this.root, name)
    const dir = dirname(fileName)
    if (!existsSync(dir)) {
      await mkdir(dir, { recursive: true })
    }

    await writeFile(fileName, data)
  }
}

class MinioStorage implements BackupStorage {
  constructor (readonly client: MinioClient, readonly bucketName: string, readonly root: string) {}
  async loadFile (name: string): Promise<Buffer> {
    const data = await this.client.getObject(this.bucketName, join(this.root, name))
    const chunks: Buffer[] = []

    await new Promise((resolve) => {
      data.on('readable', () => {
        let chunk
        while ((chunk = data.read()) !== null) {
          const b = chunk as Buffer
          chunks.push(b)
        }
      })

      data.on('end', () => {
        resolve(null)
      })
    })
    return Buffer.concat(chunks)
  }

  async write (name: string): Promise<Writable> {
    const wr = new PassThrough()
    void this.client.putObject(this.bucketName, join(this.root, name), wr)
    return wr
  }

  async load (name: string): Promise<Readable> {
    return await this.client.getObject(this.bucketName, join(this.root, name))
  }

  async exists (name: string): Promise<boolean> {
    try {
      await this.client.statObject(this.bucketName, join(this.root, name))
      return true
    } catch (err) {
      return false
    }
  }

  async writeFile (name: string, data: string | Buffer): Promise<void> {
    void this.client.putObject(this.bucketName, join(this.root, name), data, data.length)
  }
}

/**
 * @public
 */
export async function createFileBackupStorage (fileName: string): Promise<BackupStorage> {
  if (!existsSync(fileName)) {
    await mkdir(fileName, { recursive: true })
  }
  return new FileStorage(fileName)
}

/**
 * @public
 */
export async function createMinioBackupStorage (
  client: MinioClient,
  bucketName: string,
  root: string
): Promise<BackupStorage> {
  if (!(await client.bucketExists(bucketName))) {
    await client.makeBucket(bucketName, 'k8s')
  }
  return new MinioStorage(client, bucketName, root)
}
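
Usage note (not part of this change): a hedged sketch of constructing both BackupStorage backends defined above. The local directory, MinIO endpoint, credentials, bucket and root names are all placeholder values.

// Hypothetical construction of the file- and MinIO-backed storages; all settings are placeholders.
import { Client as MinioClient } from 'minio'
import { createFileBackupStorage, createMinioBackupStorage } from './storage'

async function makeStorages (): Promise<void> {
  // Directory-backed storage; createFileBackupStorage creates the directory on demand.
  const fileStorage = await createFileBackupStorage('./backups/workspace1')
  await fileStorage.writeFile('hello.txt', 'backup storage smoke test')
  console.log('file exists:', await fileStorage.exists('hello.txt'))

  // MinIO-backed storage; the bucket is created when it does not exist yet.
  const minio = new MinioClient({
    endPoint: 'localhost',
    port: 9000,
    useSSL: false,
    accessKey: 'minioadmin',
    secretKey: 'minioadmin'
  })
  const minioStorage = await createMinioBackupStorage(minio, 'backups', 'workspace1')
  console.log('minio object exists:', await minioStorage.exists('backup.json.gz'))
}

makeStorages().catch((err) => console.error(err))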

server/backup/tsconfig.json (new file, 8 lines)
@ -0,0 +1,8 @@
{
  "extends": "./node_modules/@anticrm/platform-rig/profiles/default/tsconfig.json",

  "compilerOptions": {
    "rootDir": "./src",
    "outDir": "./lib"
  }
}

@ -30,6 +30,6 @@
  "dependencies": {
    "@anticrm/core": "~0.6.16",
    "@anticrm/platform": "~0.6.6",
    "minio": "~7.0.26"
    "minio": "^7.0.26"
  }
}
@ -48,6 +48,8 @@ export interface DbAdapter {
  find: (domain: Domain) => StorageIterator

  load: (domain: Domain, docs: Ref<Doc>[]) => Promise<Doc[]>
  upload: (domain: Domain, docs: Doc[]) => Promise<void>
  clean: (domain: Domain, docs: Ref<Doc>[]) => Promise<void>
}

/**
@ -114,6 +116,10 @@ class InMemoryAdapter implements DbAdapter {
  async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
    return []
  }

  async upload (domain: Domain, docs: Doc[]): Promise<void> {}

  async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {}
}

/**
@ -83,4 +83,12 @@ class TPipeline implements Pipeline {
  async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
    return await this.storage.load(domain, docs)
  }

  async upload (domain: Domain, docs: Doc[]): Promise<void> {
    await this.storage.upload(domain, docs)
  }

  async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {
    await this.storage.clean(domain, docs)
  }
}
@ -405,6 +405,14 @@ class TServerStorage implements ServerStorage {
  async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
    return await this.getAdapter(domain).load(domain, docs)
  }

  async upload (domain: Domain, docs: Doc[]): Promise<void> {
    await this.getAdapter(domain).upload(domain, docs)
  }

  async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {
    await this.getAdapter(domain).clean(domain, docs)
  }
}

type Effect = () => Promise<void>

server/elastic/src/backup.ts (new file, 210 lines)
@ -0,0 +1,210 @@
//
// Copyright © 2022 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//

import core, {
  Class,
  Doc,
  DocumentQuery,
  Domain,
  FindOptions,
  FindResult,
  FullTextData,
  Hierarchy,
  Ref,
  Space,
  StorageIterator,
  Tx,
  TxResult
} from '@anticrm/core'
import { DbAdapter, IndexedDoc } from '@anticrm/server-core'
import { ApiResponse, Client } from '@elastic/elasticsearch'
import { createHash } from 'node:crypto'

class ElasticDataAdapter implements DbAdapter {
  constructor (readonly db: string, readonly client: Client) {}

  async findAll<T extends Doc>(
    _class: Ref<Class<T>>,
    query: DocumentQuery<T>,
    options?: FindOptions<T>
  ): Promise<FindResult<T>> {
    return Object.assign([], { total: 0 })
  }

  async tx (tx: Tx): Promise<TxResult> {
    return {}
  }

  async init (model: Tx[]): Promise<void> {}

  async close (): Promise<void> {
    await this.client.close()
  }

  find (domain: Domain): StorageIterator {
    let listReceived = false
    let pos = 0
    let buffer: { _id: string, data: IndexedDoc }[] = []
    let resp: ApiResponse
    let finished = false
    return {
      next: async () => {
        if (!listReceived) {
          resp = await this.client.search({
            index: this.db,
            type: '_doc',
            scroll: '1s',
            // search_type: 'scan', // if search_type is used it requires a size, otherwise 0 results are returned
            size: 10000,
            body: {
              query: {
                match_all: {}
              }
            }
          })
          buffer = resp.body.hits.hits.map((hit: any) => ({ _id: hit._id, data: hit._source }))
          if (buffer.length === 0) {
            finished = true
          }
          listReceived = true
        }
        if (pos === buffer.length && !finished) {
          const params = {
            scrollId: resp.body._scroll_id as string,
            scroll: '1s'
          }
          resp = await this.client.scroll(params)
          buffer = resp.body.hits.hits.map((hit: any) => ({ _id: hit._id, data: hit._source }))
          if (buffer.length === 0) {
            finished = true
          }
          pos = 0
        }
        if (pos < buffer.length) {
          const item = buffer[pos]
          const hash = createHash('sha256')
          const json = JSON.stringify(item.data)
          hash.update(json)
          const digest = hash.digest('base64')
          const result = {
            id: item._id,
            hash: digest,
            size: json.length
          }
          pos++
          return result
        }
      },
      close: async () => {}
    }
  }

  async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
    const result: Doc[] = []

    const resp = await this.client.search({
      index: this.db,
      type: '_doc',
      body: {
        query: {
          terms: {
            _id: docs,
            boost: 1.0
          }
        },
        size: docs.length
      }
    })
    const buffer = resp.body.hits.hits.map((hit: any) => ({ _id: hit._id, data: hit._source }))

    for (const item of buffer) {
      const dta: FullTextData = {
        _id: item._id as Ref<FullTextData>,
        _class: core.class.FulltextData,
        space: 'fulltext-blob' as Ref<Space>,
        modifiedOn: item.data.modifiedOn,
        modifiedBy: item.data.modifiedBy,
        data: item.data
      }
      result.push(dta)
    }
    return result
  }

  async upload (domain: Domain, docs: Doc[]): Promise<void> {
    while (docs.length > 0) {
      const part = docs.splice(0, 10000)
      await this.client.deleteByQuery(
        {
          type: '_doc',
          index: this.db,
          body: {
            query: {
              terms: {
                _id: Array.from(part.map((it) => it._id)),
                boost: 1.0
              }
            },
            size: part.length
          }
        },
        undefined
      )

      const operations = part.flatMap((doc) => [
        { index: { _index: this.db, _id: doc._id } },
        (doc as FullTextData).data
      ])

      await this.client.bulk({ refresh: true, body: operations })
    }
  }

  async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {
    while (docs.length > 0) {
      const part = docs.splice(0, 10000)
      await this.client.deleteByQuery(
        {
          type: '_doc',
          index: this.db,
          body: {
            query: {
              terms: {
                _id: part,
                boost: 1.0
              }
            },
            size: part.length
          }
        },
        undefined
      )
    }
  }
}

/**
 * @public
 */
export async function createElasticBackupDataAdapter (
  hierarchy: Hierarchy,
  url: string,
  db: string
): Promise<DbAdapter> {
  const client = new Client({
    node: url
  })
  return new ElasticDataAdapter(db, client)
}
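
Usage note (not part of this change): a hedged sketch of driving the adapter's find()/load() pair to enumerate and fetch full-text documents. The Elastic URL and index name are placeholders, the empty Hierarchy only satisfies the factory signature, and the iterator is assumed to return undefined once exhausted, as the implementation above suggests.

// Hypothetical dump of the full-text domain via the backup adapter; values are placeholders.
import { Doc, DOMAIN_FULLTEXT_BLOB, Hierarchy, Ref } from '@anticrm/core'
import { createElasticBackupDataAdapter } from './backup'

async function dumpFulltext (): Promise<void> {
  const adapter = await createElasticBackupDataAdapter(new Hierarchy(), 'http://localhost:9200', 'ws-fulltext')
  const iterator = adapter.find(DOMAIN_FULLTEXT_BLOB)
  const ids: Ref<Doc>[] = []
  while (true) {
    const info = await iterator.next()
    if (info === undefined) break
    ids.push(info.id as Ref<Doc>)
  }
  await iterator.close()
  // Fetch the collected ids as FullTextData wrappers.
  const docs = await adapter.load(DOMAIN_FULLTEXT_BLOB, ids)
  console.log('full-text documents retrieved:', docs.length)
  await adapter.close()
}

dumpFulltext().catch((err) => console.error(err))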

@ -15,3 +15,4 @@
//

export { createElasticAdapter } from './adapter'
export { createElasticBackupDataAdapter } from './backup'
@ -53,7 +53,7 @@
    "@anticrm/server-token": "~0.6.0",
    "@anticrm/attachment": "~0.6.1",
    "@anticrm/contrib": "~0.6.0",
    "minio": "~7.0.26",
    "minio": "^7.0.26",
    "body-parser": "~1.19.1",
    "compression": "~1.7.4"
  }
@ -76,6 +76,10 @@ class NullDbAdapter implements DbAdapter {
  async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
    return []
  }

  async upload (domain: Domain, docs: Doc[]): Promise<void> {}

  async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {}
}

async function createNullAdapter (hierarchy: Hierarchy, url: string, db: string, modelDb: ModelDb): Promise<DbAdapter> {
@ -185,7 +189,9 @@ describe('mongo operations', () => {
        close: async () => {},
        loadChunk: async (domain): Promise<DocChunk> => await Promise.reject(new Error('unsupported')),
        closeChunk: async (idx) => {},
        loadDocs: async (domain: Domain, docs: Ref<Doc>[]) => []
        loadDocs: async (domain: Domain, docs: Ref<Doc>[]) => [],
        upload: async (domain: Domain, docs: Doc[]) => {},
        clean: async (domain: Domain, docs: Ref<Doc>[]) => {}
      }
      return st
    })
@ -46,9 +46,8 @@ import core, {
} from '@anticrm/core'
import type { DbAdapter, TxAdapter } from '@anticrm/server-core'
import { Collection, Db, Document, Filter, MongoClient, Sort } from 'mongodb'
import { getMongoClient } from './utils'

import { createHash } from 'node:crypto'
import { getMongoClient } from './utils'

function translateDoc (doc: Doc): Document {
  return doc as Document
@ -457,6 +456,20 @@ abstract class MongoAdapterBase extends TxProcessor {
      .find<Doc>({ _id: { $in: docs } })
      .toArray()
  }

  async upload (domain: Domain, docs: Doc[]): Promise<void> {
    const coll = this.db.collection(domain)

    const docMap = new Map(docs.map((it) => [it._id, it]))

    // remove old and insert new ones
    await coll.deleteMany({ _id: { $in: Array.from(docMap.keys()) } })
    await coll.insertMany(Array.from(docMap.values()) as Document[])
  }

  async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {
    await this.db.collection(domain).deleteMany({ _id: { $in: docs } })
  }
}

class MongoAdapter extends MongoAdapterBase {
@ -48,7 +48,7 @@
    "@anticrm/mongo": "~0.6.1",
    "@anticrm/elastic": "~0.6.0",
    "elastic-apm-node": "~3.26.0",
    "minio": "~7.0.26",
    "minio": "^7.0.26",
    "@anticrm/server-contact": "~0.6.1",
    "@anticrm/server-contact-resources": "~0.6.0",
    "@anticrm/server-notification": "~0.6.0",
@ -81,4 +81,12 @@ export class BackupClientSession extends ClientSession implements BackupSession
  async loadDocs (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
    return await this._pipeline.storage.load(domain, docs)
  }

  async upload (ctx: MeasureContext, domain: Domain, docs: Doc[]): Promise<void> {
    return await this._pipeline.storage.upload(domain, docs)
  }

  async clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> {
    return await this._pipeline.storage.clean(domain, docs)
  }
}
@ -131,6 +131,32 @@ class MinioBlobAdapter implements DbAdapter {
    }
    return result
  }

  async upload (domain: Domain, docs: Doc[]): Promise<void> {
    // Find documents to be updated
    for (const d of docs) {
      if (d._class !== core.class.BlobData) {
        // Skip non blob data documents
        continue
      }
      const blob = d as unknown as BlobData
      // Remove existing document
      try {
        await this.client.removeObject(this.db, blob._id)
      } catch (ee) {
        // ignore error
      }
      const buffer = Buffer.from(blob.base64Data, 'base64')
      await this.client.putObject(this.db, blob._id, buffer, buffer.length, {
        'Content-Type': blob.type,
        lastModified: new Date(blob.modifiedOn)
      })
    }
  }

  async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {
    await this.client.removeObjects(this.db, docs)
  }
}

/**
@ -19,6 +19,7 @@ import {
  DocumentQuery,
  Domain,
  DOMAIN_BLOB,
  DOMAIN_FULLTEXT_BLOB,
  DOMAIN_MODEL,
  DOMAIN_TRANSIENT,
  DOMAIN_TX,
@ -32,7 +33,7 @@ import {
  Tx,
  TxResult
} from '@anticrm/core'
import { createElasticAdapter } from '@anticrm/elastic'
import { createElasticAdapter, createElasticBackupDataAdapter } from '@anticrm/elastic'
import { ModifiedMiddleware, PrivateMiddleware } from '@anticrm/middleware'
import { createMongoAdapter, createMongoTxAdapter } from '@anticrm/mongo'
import { addLocation } from '@anticrm/platform'
@ -91,6 +92,10 @@ class NullDbAdapter implements DbAdapter {
  async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
    return []
  }

  async upload (domain: Domain, docs: Doc[]): Promise<void> {}

  async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {}
}

async function createNullAdapter (hierarchy: Hierarchy, url: string, db: string, modelDb: ModelDb): Promise<DbAdapter> {
@ -141,6 +146,7 @@ export function start (
        [DOMAIN_TX]: 'MongoTx',
        [DOMAIN_TRANSIENT]: 'InMemory',
        [DOMAIN_BLOB]: 'MinioData',
        [DOMAIN_FULLTEXT_BLOB]: 'FullTextBlob',
        [DOMAIN_MODEL]: 'Null'
      },
      defaultAdapter: 'Mongo',
@ -164,6 +170,10 @@ export function start (
        MinioData: {
          factory: createMinioDataAdapter,
          url: ''
        },
        FullTextBlob: {
          factory: createElasticBackupDataAdapter,
          url: fullTextUrl
        }
      },
      fulltextAdapter: {
@ -30,7 +30,7 @@
  "dependencies": {
    "@anticrm/core": "~0.6.16",
    "@anticrm/platform": "~0.6.6",
    "minio": "~7.0.26",
    "minio": "^7.0.26",
    "jwt-simple": "~0.5.6"
  }
}
@ -31,7 +31,7 @@
    "mongodb": "^4.1.1",
    "@anticrm/platform": "~0.6.6",
    "@anticrm/model-all": "~0.6.0",
    "minio": "~7.0.26",
    "minio": "^7.0.26",
    "@anticrm/core": "~0.6.16",
    "@anticrm/contact": "~0.6.5",
    "@anticrm/client-resources": "~0.6.4",
@ -26,14 +26,10 @@ import { generateToken } from '@anticrm/server-token'
export async function connect (
  transactorUrl: string,
  workspace: string,
  reloadModel: boolean,
  email?: string
  email?: string,
  extra?: Record<string, string>
): Promise<Client> {
  const token = generateToken(
    email ?? 'anticrm@hc.engineering',
    workspace,
    reloadModel ? { model: 'upgrade' } : undefined
  )
  const token = generateToken(email ?? 'anticrm@hc.engineering', workspace, extra)

  // We need to override default factory with 'ws' one.
  // eslint-disable-next-line
@ -97,7 +97,7 @@ export async function initModel (transactorUrl: string, dbName: string): Promise
  console.log(`${result.insertedCount} model transactions inserted.`)

  console.log('creating data...')
  const connection = await connect(transactorUrl, dbName, true)
  const connection = await connect(transactorUrl, dbName, undefined, { model: 'upgrade' })
  try {
    for (const op of migrateOperations) {
      await op.upgrade(connection)
@ -156,7 +156,7 @@ export async function upgradeModel (transactorUrl: string, dbName: string): Prom

  console.log('Apply upgrade operations')

  const connection = await connect(transactorUrl, dbName, true)
  const connection = await connect(transactorUrl, dbName, undefined, { model: 'upgrade' })

  // Create update indexes
  await createUpdateIndexes(connection, db)
@ -73,7 +73,9 @@ describe('server', () => {
        next: async () => undefined,
        close: async () => {}
      }),
      load: async (domain: Domain, docs: Ref<Doc>[]) => []
      load: async (domain: Domain, docs: Ref<Doc>[]) => [],
      upload: async (domain: Domain, docs: Doc[]) => {},
      clean: async (domain: Domain, docs: Ref<Doc>[]) => {}
    }),
    (token, pipeline, broadcast) => new ClientSession(broadcast, token, pipeline),
    3333
@ -58,6 +58,13 @@ class SessionManager {
      // Drop all existing clients
      if (workspace.sessions.length > 0) {
        for (const s of workspace.sessions) {
          s[1].send(
            serialize({
              result: {
                _class: core.class.TxModelUpgrade
              }
            })
          )
          await this.close(ctx, s[1], token.workspace, 0, 'upgrade')
        }
      }
@ -65,6 +72,7 @@ class SessionManager {
    }

    if (workspace.upgrade) {
      ws.close()
      throw new Error('Upgrade in progress....')
    }

@ -134,6 +142,7 @@ class SessionManager {
    if (index !== -1) {
      const session = workspace.sessions[index]
      workspace.sessions.splice(index, 1)
      session[1].close()
      const user = session[0].getUser()
      const another = workspace.sessions.findIndex((p) => p[0].getUser() === user)
      if (another === -1) {
@ -142,7 +151,7 @@ class SessionManager {
        if (workspace.sessions.length === 0) {
          if (LOGGING_ENABLED) console.log('no sessions for workspace', workspaceId)
          this.workspaces.delete(workspaceId)
          workspace.pipeline.close().catch((err) => console.error(err))
          await workspace.pipeline.close().catch((err) => console.error(err))
        }
      }
    }
@ -174,6 +183,10 @@ async function handleRequest<S extends Session> (
  msg: string
): Promise<void> {
  const request = readRequest(msg)
  if (request.id === -1 && request.method === 'hello') {
    ws.send(serialize({ id: -1, result: 'hello' }))
    return
  }
  const f = (service as any)[request.method]
  try {
    const params = [ctx, ...request.params]