Init scripts with unified import format (#7242)

* Integrate import into workspace initialization

Signed-off-by: Anna Khismatullina <anna.khismatullina@gmail.com>

* Support importing drawings along with attachments

Signed-off-by: Anna Khismatullina <anna.khismatullina@gmail.com>

* Put init scripts into workspace container

Signed-off-by: Anna Khismatullina <anna.khismatullina@gmail.com>

---------

Signed-off-by: Anna Khismatullina <anna.khismatullina@gmail.com>
Anna Khismatullina 2024-12-12 18:11:09 +07:00 committed by GitHub
parent 331af1cbf6
commit 57ee462ca6
51 changed files with 883 additions and 194 deletions


@ -42,6 +42,7 @@ env:
tools
PublishTempFolder: publish_artifacts
MODEL_VERSION_MODE: ${{ startsWith(github.ref, 'refs/tags/s') && 'tagTime' || 'file' }}
INIT_SCRIPTS_BRANCH: 'unified-init-scripts'
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
@ -493,6 +494,14 @@ jobs:
with:
fetch-depth: 0
filter: tree:0
- name: Checkout init repository
run: |
wget https://github.com/hcengineering/init/archive/refs/heads/${{env.INIT_SCRIPTS_BRANCH}}.zip
unzip ${{env.INIT_SCRIPTS_BRANCH}}.zip -d pods/workspace
mv pods/workspace/init-${{env.INIT_SCRIPTS_BRANCH}} pods/workspace/init
rm -rf ${{env.INIT_SCRIPTS_BRANCH}}.zip
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'

.vscode/launch.json

@ -624,7 +624,7 @@
"name": "Debug Huly import",
"type": "node",
"request": "launch",
"args": ["src/__start.ts", "import", "/home/anna/xored/huly/platform/dev/import-tool/src/huly/example-workspace", "-u", "user1", "-pw", "1234", "-ws", "ws12"],
"args": ["src/__start.ts", "import", "/home/anna/huly/platform/dev/import-tool/docs/huly/example-workspace", "-u", "user1", "-pw", "1234", "-ws", "ws1"],
"env": {
"FRONT_URL": "http://localhost:8087"
},


@ -8,7 +8,7 @@
"editor.defaultFormatter": "dbaeumer.vscode-eslint"
},
"[json]": {
"editor.defaultFormatter": "dbaeumer.vscode-eslint"
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[markdown]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"


@ -296,6 +296,9 @@ dependencies:
'@rush-temp/import-tool':
specifier: file:./projects/import-tool.tgz
version: file:projects/import-tool.tgz
'@rush-temp/importer':
specifier: file:./projects/importer.tgz
version: file:projects/importer.tgz(esbuild@0.20.1)(ts-node@10.9.2)
'@rush-temp/inventory':
specifier: file:./projects/inventory.tgz
version: file:projects/inventory.tgz(@types/node@20.11.19)(esbuild@0.20.1)(ts-node@10.9.2)
@ -1640,6 +1643,9 @@ dependencies:
htmlparser2:
specifier: ^9.0.0
version: 9.1.0
image-size:
specifier: ^1.1.1
version: 1.1.1
intl-messageformat:
specifier: ^9.7.1
version: 9.13.0
@ -5242,6 +5248,10 @@ packages:
resolution: {integrity: sha512-dyHY+sMF0ihPus3O27ODd4+agdHMEmuRdyiZJ2CCWjPV5UFmn17ZbElvk6WOGVE4rdCJKZQCrPV2BcikOMLUGQ==}
dev: false
/@sec-ant/readable-stream@0.4.1:
resolution: {integrity: sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==}
dev: false
/@selderee/plugin-htmlparser2@0.11.0:
resolution: {integrity: sha512-P33hHGdldxGabLFjPPpaTxVolMrzrcegejx+0GxjrIb9Zv48D8yAIA/QTDR2dFl7Uz7urX8aX6+5bCZslr+gWQ==}
dependencies:
@ -12476,6 +12486,16 @@ packages:
token-types: 4.2.1
dev: false
/file-type@19.6.0:
resolution: {integrity: sha512-VZR5I7k5wkD0HgFnMsq5hOsSc710MJMu5Nc5QYsbe38NN5iPV/XTObYLc/cpttRTf6lX538+5uO1ZQRhYibiZQ==}
engines: {node: '>=18'}
dependencies:
get-stream: 9.0.1
strtok3: 9.1.1
token-types: 6.0.0
uint8array-extras: 1.4.0
dev: false
/filelist@1.0.4:
resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==}
dependencies:
@ -12897,6 +12917,14 @@ packages:
engines: {node: '>=10'}
dev: false
/get-stream@9.0.1:
resolution: {integrity: sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==}
engines: {node: '>=18'}
dependencies:
'@sec-ant/readable-stream': 0.4.1
is-stream: 4.0.1
dev: false
/get-symbol-description@1.0.2:
resolution: {integrity: sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==}
engines: {node: '>= 0.4'}
@ -13654,6 +13682,14 @@ packages:
'@types/node': 16.9.1
dev: false
/image-size@1.1.1:
resolution: {integrity: sha512-541xKlUw6jr/6gGuk92F+mYM5zaFAc5ahphvkqvNe2bQ6gVBkd6bfrmVJ2t4KDAfikAYZyIqTnktX3i6/aQDrQ==}
engines: {node: '>=16.x'}
hasBin: true
dependencies:
queue: 6.0.2
dev: false
/immediate@3.0.6:
resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==}
dev: false
@ -14070,6 +14106,11 @@ packages:
engines: {node: '>=8'}
dev: false
/is-stream@4.0.1:
resolution: {integrity: sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==}
engines: {node: '>=18'}
dev: false
/is-string@1.0.7:
resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==}
engines: {node: '>= 0.4'}
@ -17235,6 +17276,11 @@ packages:
engines: {node: '>=8'}
dev: false
/peek-readable@5.3.1:
resolution: {integrity: sha512-GVlENSDW6KHaXcd9zkZltB7tCLosKB/4Hg0fqBJkAoBgYG2Tn1xtMgXtSUuMU9AK/gCm/tTdT8mgAeF4YNeeqw==}
engines: {node: '>=14.16'}
dev: false
/pend@1.2.0:
resolution: {integrity: sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==}
dev: false
@ -18106,6 +18152,12 @@ packages:
requiresBuild: true
dev: false
/queue@6.0.2:
resolution: {integrity: sha512-iHZWu+q3IdFZFX36ro/lKBkSvfkztY5Y7HMiPlOUjhupPcG2JMfst2KKEpu5XndviX/3UhFbRngUPNKtgvtZiA==}
dependencies:
inherits: 2.0.4
dev: false
/quick-format-unescaped@4.0.4:
resolution: {integrity: sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==}
dev: false
@ -19540,6 +19592,14 @@ packages:
peek-readable: 4.1.0
dev: false
/strtok3@9.1.1:
resolution: {integrity: sha512-FhwotcEqjr241ZbjFzjlIYg6c5/L/s4yBGWSMvJ9UoExiSqL+FnFA/CaeZx17WGaZMS/4SOZp8wH18jSS4R4lw==}
engines: {node: '>=16'}
dependencies:
'@tokenizer/token': 0.3.0
peek-readable: 5.3.1
dev: false
/style-loader@3.3.4(webpack@5.97.1):
resolution: {integrity: sha512-0WqXzrsMTyb8yjZJHDqwmnwRJvhALK9LfRtRc6B4UTWe8AijYLZYZ9thuJTZc2VfQWINADW/j+LiJnfy2RoC1w==}
engines: {node: '>= 12.13.0'}
@ -20092,6 +20152,14 @@ packages:
ieee754: 1.2.1
dev: false
/token-types@6.0.0:
resolution: {integrity: sha512-lbDrTLVsHhOMljPscd0yitpozq7Ga2M5Cvez5AjGg8GASBjtt6iERCAJ93yommPmz62fb45oFIXHEZ3u9bfJEA==}
engines: {node: '>=14.16'}
dependencies:
'@tokenizer/token': 0.3.0
ieee754: 1.2.1
dev: false
/toposort@2.0.2:
resolution: {integrity: sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg==}
dev: false
@ -20619,6 +20687,11 @@ packages:
resolution: {integrity: sha512-IevTus0SbGwQzYh3+fRsAMTVVPOoIVufzacXcHPmdlle1jUpq7BRL+mw3dgeLanvGZdwwbWhRV6XrcFNdBmjWA==}
dev: false
/uint8array-extras@1.4.0:
resolution: {integrity: sha512-ZPtzy0hu4cZjv3z5NW9gfKnNLjoz4y6uv4HlelAjDK7sY/xOkKZv9xK/WQpcsBB3jEybChz9DPC2U/+cusjJVQ==}
engines: {node: '>=18'}
dev: false
/unbox-primitive@1.0.2:
resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==}
dependencies:
@ -21807,7 +21880,7 @@ packages:
dev: false
file:projects/account-service.tgz:
resolution: {integrity: sha512-Cyt9smf3yBf80n9zd8D77piZd9wnJIYLjzSJzSfcZLA+w4WPacwDMZEU9hN3Pf9aAIpeaaVpuZqXSvnOoBvv4A==, tarball: file:projects/account-service.tgz}
resolution: {integrity: sha512-Kxz4jt8+j+W4r7hgcpPq5D1wVL5G4AmpEWF6yxyUTR6+vxQERCTILfJo6wRbecPBvEPcFvA7Snogo8exY2YJQg==, tarball: file:projects/account-service.tgz}
name: '@rush-temp/account-service'
version: 0.0.0
dependencies:
@ -24718,7 +24791,7 @@ packages:
dev: false
file:projects/import-tool.tgz:
resolution: {integrity: sha512-0Q1/hHZxEdYFPr2qqfovlVJRA8JyvWsKtY3ubHrffnaAMbbN6BNWt6Jf+GzwyGeIwImLI2Oud2x/WqOFb/USdg==, tarball: file:projects/import-tool.tgz}
resolution: {integrity: sha512-Pkbjr/0XN1X7G8Gi6vwh0DjhRr8f0un2HSW2ef6ZMsIv2MyH8BgE2erUpXmzHETxSKsyJllAm6T1DmfSa26XIg==, tarball: file:projects/import-tool.tgz}
name: '@rush-temp/import-tool'
version: 0.0.0
dependencies:
@ -24765,6 +24838,45 @@ packages:
- supports-color
dev: false
file:projects/importer.tgz(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-H8zT+HFFD2VV3Fo3ALiiDtd0VtXQZpWEfm10a5udm4D07Gv4DgxJOLl8uF4wNId//6UNKOp6/z517p8D/8velg==, tarball: file:projects/importer.tgz}
id: file:projects/importer.tgz
name: '@rush-temp/importer'
version: 0.0.0
dependencies:
'@types/csvtojson': 2.0.0
'@types/jest': 29.5.12
'@types/js-yaml': 4.0.9
'@types/mime-types': 2.1.4
'@types/node': 20.11.19
'@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.56.0)(typescript@5.6.2)
'@typescript-eslint/parser': 6.21.0(eslint@8.56.0)(typescript@5.6.2)
commander: 8.3.0
csvtojson: 2.0.10
eslint: 8.56.0
eslint-config-standard-with-typescript: 40.0.0(@typescript-eslint/eslint-plugin@6.21.0)(eslint-plugin-import@2.29.1)(eslint-plugin-n@15.7.0)(eslint-plugin-promise@6.1.1)(eslint@8.56.0)(typescript@5.6.2)
eslint-plugin-import: 2.29.1(eslint@8.56.0)
eslint-plugin-n: 15.7.0(eslint@8.56.0)
eslint-plugin-promise: 6.1.1(eslint@8.56.0)
file-type: 19.6.0
image-size: 1.1.1
jest: 29.7.0(@types/node@20.11.19)(ts-node@10.9.2)
js-yaml: 4.1.0
mime-types: 2.1.35
prettier: 3.2.5
ts-jest: 29.1.2(esbuild@0.20.1)(jest@29.7.0)(typescript@5.6.2)
typescript: 5.6.2
transitivePeerDependencies:
- '@babel/core'
- '@jest/types'
- babel-jest
- babel-plugin-macros
- esbuild
- node-notifier
- supports-color
- ts-node
dev: false
file:projects/inventory-assets.tgz(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-Uayr70kuiNfwBgNoFcu1rkWhdHhnbg7aEDcbozbhn5Eyel/B6he2uUYxPZc2gl1VUiEA8KBGszSAEXMs0YER0A==, tarball: file:projects/inventory-assets.tgz}
id: file:projects/inventory-assets.tgz
@ -31166,7 +31278,7 @@ packages:
dev: false
file:projects/server-tool.tgz(@types/node@20.11.19)(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-Pd5QWUOAPkgU3vgG4J4yXrQJuskV46yXxzATYcWPzY+4LQRJ4jshHlld/rI2EGJKJNr5fy6trH1p8LJ+xvLdeA==, tarball: file:projects/server-tool.tgz}
resolution: {integrity: sha512-FwqM3BepKqMKvnizEtaWgyv6YTuhwVDaJ4yPF+DEDLqHmxdeGC5u2Akj+sIdP0qtmSRQy9ts5NMwJw2SVNIetQ==, tarball: file:projects/server-tool.tgz}
id: file:projects/server-tool.tgz
name: '@rush-temp/server-tool'
version: 0.0.0


@ -1,3 +1,3 @@
STORAGE_CONFIG="minio|minio?accessKey=minioadmin&secretKey=minioadmin"
MONGO_URL=mongodb://mongodb:27017?compressors=snappy
DB_URL_PG=postgresql://postgres:example@postgres:5432
DB_URL_PG=postgresql://postgres:example@postgres:5432


@ -138,7 +138,6 @@ services:
- ACCOUNTS_URL=http://host.docker.internal:3000
- BRANDING_PATH=/var/cfg/branding.json
# - PARALLEL=2
- INIT_SCRIPT_URL=https://raw.githubusercontent.com/hcengineering/init/main/script.yaml
- INIT_WORKSPACE=test
restart: unless-stopped
workspacepg:
@ -165,7 +164,6 @@ services:
- ACCOUNTS_URL=http://host.docker.internal:3000
- BRANDING_PATH=/var/cfg/branding.json
# - PARALLEL=2
- INIT_SCRIPT_URL=https://raw.githubusercontent.com/hcengineering/init/main/script.yaml
# - INIT_WORKSPACE=onboarding
restart: unless-stopped
workspace_cockroach:
@ -191,7 +189,6 @@ services:
- ACCOUNTS_URL=http://host.docker.internal:3000
- BRANDING_PATH=/var/cfg/branding.json
# - PARALLEL=2
- INIT_SCRIPT_URL=https://raw.githubusercontent.com/hcengineering/init/main/script.yaml
# - INIT_WORKSPACE=onboarding
restart: unless-stopped
collaborator:


@ -5,9 +5,9 @@ Tool for importing data into Huly workspace.
## Recommended Import Method
### Unified Format Import
The recommended way to import data into Huly is using our [Unified Import Format](./src/huly/README.md). This format provides a straightforward way to migrate data from any system by converting it into an intermediate, human-readable structure.
The recommended way to import data into Huly is using our [Unified Import Format](./docs/huly/README.md). This format provides a straightforward way to migrate data from any system by converting it into an intermediate, human-readable structure.
See the [complete guide](./src/huly/README.md) and [example workspace](./src/huly/example-workspace) to get started.
See the [complete guide](./docs/huly/README.md) and [example workspace](./docs/huly/example-workspace) to get started.
### Why Use Unified Format?
- Simple, human-readable format using YAML and Markdown
@ -19,7 +19,7 @@ See the [complete guide](./src/huly/README.md) and [example workspace](./src/hul
We also support direct import from some platforms:
1. **Notion**: see [Import from Notion Guide](./src/notion/README.md)
2. **ClickUp**: see [Import from ClickUp Guide](./src/clickup/README.md)
1. **Notion**: see [Import from Notion Guide](./docs/notion/README.md)
2. **ClickUp**: see [Import from ClickUp Guide](./docs/clickup/README.md)
These direct imports are suitable for simple migrations, but for complex cases or systems not listed above, please use the Unified Format.


@ -1,5 +1,6 @@
class: document:class:Teamspace
title: Documentation
emoji: 📖
private: false
autoJoin: true
owners:


@ -1,6 +1,7 @@
class: tracker:class:Project
title: Project Alpha
identifier: ALPHA
emoji: 🦄
private: false
autoJoin: true
owners:


@ -16,6 +16,8 @@ comments:
- author: john.doe@example.com
text: |
Perfect, don't forget to update [documentation](../Documentation/User%20Guide/Installation.md) when you're done.
attachments:
- ./files/screenshot.png
---
**Initial project infrastructure setup.**

Binary file added (image, 39 KiB, not shown)


@ -0,0 +1,55 @@
{
"class": "drawing:class:Drawing",
"content": [
{
"lineWidth": 4,
"erasing": false,
"penColor": "#0000ff",
"points": [
{ "x": 213, "y": 181.5 },
{ "x": 203, "y": 181.5 },
{ "x": 193, "y": 181.5 },
{ "x": 168, "y": 181.5 },
{ "x": 136, "y": 181.5 },
{ "x": 108, "y": 181.5 },
{ "x": 84, "y": 188.5 },
{ "x": 59, "y": 196.5 },
{ "x": 42, "y": 205.5 },
{ "x": 35, "y": 220.5 },
{ "x": 32, "y": 227.5 },
{ "x": 31, "y": 233.5 },
{ "x": 31, "y": 241.5 },
{ "x": 31, "y": 248.5 },
{ "x": 34, "y": 257.5 },
{ "x": 41, "y": 269.5 },
{ "x": 53, "y": 280.5 },
{ "x": 69, "y": 290.5 },
{ "x": 84, "y": 297.5 },
{ "x": 112, "y": 305.5 },
{ "x": 190, "y": 319.5 },
{ "x": 250, "y": 323.5 },
{ "x": 314, "y": 323.5 },
{ "x": 372, "y": 317.5 },
{ "x": 410, "y": 303.5 },
{ "x": 424, "y": 292.5 },
{ "x": 427, "y": 279.5 },
{ "x": 427, "y": 265.5 },
{ "x": 425, "y": 250.5 },
{ "x": 413, "y": 225.5 },
{ "x": 407, "y": 214.5 },
{ "x": 398, "y": 203.5 },
{ "x": 380, "y": 189.5 },
{ "x": 361, "y": 181.5 },
{ "x": 317, "y": 168.5 },
{ "x": 261, "y": 154.5 },
{ "x": 207, "y": 148.5 },
{ "x": 168, "y": 148.5 },
{ "x": 153, "y": 148.5 },
{ "x": 147, "y": 151.5 },
{ "x": 143, "y": 157.5 },
{ "x": 142, "y": 165.5 },
{ "x": 142, "y": 165.5 }
]
}
]
}


@ -34,43 +34,26 @@
"cross-env": "~7.0.3",
"@hcengineering/platform-rig": "^0.6.0",
"@typescript-eslint/eslint-plugin": "^6.11.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-promise": "^6.1.1",
"eslint-plugin-n": "^15.4.0",
"eslint": "^8.54.0",
"ts-node": "^10.8.0",
"esbuild": "^0.20.0",
"@types/mime-types": "~2.1.1",
"@types/node": "~20.11.16",
"@typescript-eslint/parser": "^6.11.0",
"eslint-config-standard-with-typescript": "^40.0.0",
"prettier": "^3.1.0",
"typescript": "^5.3.3",
"jest": "^29.7.0",
"ts-jest": "^29.1.1",
"@types/jest": "^29.5.5",
"@types/csvtojson": "^2.0.0",
"@types/js-yaml": "^4.0.9"
"eslint-config-standard-with-typescript": "^40.0.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-n": "^15.4.0",
"eslint-plugin-promise": "^6.1.1",
"prettier": "^3.1.0"
},
"dependencies": {
"@hcengineering/attachment": "^0.6.14",
"@hcengineering/collaboration": "^0.6.0",
"@hcengineering/document": "^0.6.0",
"@hcengineering/text": "^0.6.5",
"@hcengineering/model-attachment": "^0.6.0",
"@hcengineering/model-core": "^0.6.0",
"@hcengineering/core": "^0.6.32",
"@hcengineering/platform": "^0.6.11",
"@hcengineering/server-tool": "^0.6.0",
"@hcengineering/server-client": "^0.6.0",
"@hcengineering/rank": "^0.6.4",
"@hcengineering/tracker": "^0.6.24",
"commander": "^8.1.0",
"mime-types": "~2.1.34",
"csvtojson": "^2.0.10",
"@hcengineering/task": "^0.6.20",
"@hcengineering/contact": "^0.6.24",
"@hcengineering/chunter": "^0.6.20",
"js-yaml": "^4.1.0"
"@hcengineering/importer": "^0.6.1",
"commander": "^8.1.0"
}
}


@ -20,11 +20,29 @@ import serverClientPlugin, {
selectWorkspace
} from '@hcengineering/server-client'
import { program } from 'commander'
import { importNotion } from './notion/notion'
import { setMetadata } from '@hcengineering/platform'
import { FrontFileUploader, type FileUploader } from './importer/uploader'
import { ClickupImporter } from './clickup/clickup'
import { UnifiedFormatImporter } from './huly/unified'
import {
UnifiedFormatImporter,
ClickupImporter,
importNotion,
FrontFileUploader,
type FileUploader,
type Logger
} from '@hcengineering/importer'
class ConsoleLogger implements Logger {
log (msg: string, data?: any): void {
console.log(msg, data)
}
warn (msg: string, data?: any): void {
console.warn(msg, data)
}
error (msg: string, data?: any): void {
console.error(msg, data)
}
}
/**
* @public
@ -127,7 +145,7 @@ export function importTool (): void {
.action(async (file: string, cmd) => {
const { workspace, user, password } = cmd
await authorize(user, password, workspace, async (client, uploader) => {
const importer = new ClickupImporter(client, uploader)
const importer = new ClickupImporter(client, uploader, new ConsoleLogger())
await importer.importClickUpTasks(file)
})
})
@ -142,7 +160,7 @@ export function importTool (): void {
.action(async (dir: string, cmd) => {
const { workspace, user, password } = cmd
await authorize(user, password, workspace, async (client, uploader) => {
const importer = new UnifiedFormatImporter(client, uploader)
const importer = new UnifiedFormatImporter(client, uploader, new ConsoleLogger())
await importer.importFolder(dir)
})
})


@ -60,7 +60,7 @@ import serverClientPlugin, {
} from '@hcengineering/server-client'
import { createBackupPipeline, getConfig } from '@hcengineering/server-pipeline'
import serverToken, { decodeToken, generateToken } from '@hcengineering/server-token'
import toolPlugin, { FileModelLogger } from '@hcengineering/server-tool'
import { FileModelLogger } from '@hcengineering/server-tool'
import { createWorkspace, upgradeWorkspace } from '@hcengineering/workspace-service'
import path from 'path'
@ -167,11 +167,6 @@ export function devTool (
console.error('please provide transactor url.')
}
const initScriptUrl = process.env.INIT_SCRIPT_URL
if (initScriptUrl !== undefined) {
setMetadata(toolPlugin.metadata.InitScriptURL, initScriptUrl)
}
setMetadata(accountPlugin.metadata.Transactors, transactorUrl)
setMetadata(serverClientPlugin.metadata.Endpoint, accountsUrl)
setMetadata(serverToken.metadata.Secret, serverSecret)


@ -0,0 +1,7 @@
module.exports = {
extends: ['./node_modules/@hcengineering/platform-rig/profiles/default/eslint.config.json'],
parserOptions: {
tsconfigRootDir: __dirname,
project: './tsconfig.json'
}
}


@ -0,0 +1,4 @@
*
!/lib/**
!CHANGELOG.md
/lib/**/__tests__/


@ -0,0 +1,7 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
testMatch: ['**/?(*.)+(spec|test).[jt]s?(x)'],
roots: ["./src"],
coverageReporters: ["text-summary", "html"]
}


@ -0,0 +1,69 @@
{
"name": "@hcengineering/importer",
"version": "0.6.1",
"main": "lib/index.js",
"svelte": "src/index.ts",
"types": "types/index.d.ts",
"files": [
"lib/**/*",
"types/**/*",
"tsconfig.json"
],
"author": "Anticrm Platform Contributors",
"license": "EPL-2.0",
"scripts": {
"build": "compile",
"build:watch": "compile",
"test": "jest --passWithNoTests --silent",
"format": "format src",
"_phase:build": "compile transpile src",
"_phase:test": "jest --passWithNoTests --silent",
"_phase:format": "format src",
"_phase:validate": "compile validate"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",
"@typescript-eslint/eslint-plugin": "^6.11.0",
"@typescript-eslint/parser": "^6.11.0",
"eslint": "^8.54.0",
"eslint-config-standard-with-typescript": "^40.0.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-n": "^15.4.0",
"eslint-plugin-promise": "^6.1.1",
"prettier": "^3.1.0",
"typescript": "^5.3.3",
"jest": "^29.7.0",
"ts-jest": "^29.1.1",
"@types/jest": "^29.5.5",
"@types/node": "~20.11.16",
"@types/mime-types": "~2.1.1",
"@types/csvtojson": "^2.0.0",
"@types/js-yaml": "^4.0.9"
},
"dependencies": {
"@hcengineering/attachment": "^0.6.14",
"@hcengineering/chunter": "^0.6.20",
"@hcengineering/collaboration": "^0.6.0",
"@hcengineering/contact": "^0.6.24",
"@hcengineering/core": "^0.6.32",
"@hcengineering/document": "^0.6.0",
"@hcengineering/model-attachment": "^0.6.0",
"@hcengineering/model-core": "^0.6.0",
"@hcengineering/platform": "^0.6.11",
"@hcengineering/rank": "^0.6.4",
"@hcengineering/server-core": "^0.6.1",
"@hcengineering/task": "^0.6.20",
"@hcengineering/text": "^0.6.5",
"@hcengineering/tracker": "^0.6.24",
"@hcengineering/view": "^0.6.13",
"commander": "^8.1.0",
"mime-types": "~2.1.34",
"csvtojson": "^2.0.10",
"js-yaml": "^4.1.0",
"image-size": "^1.1.1"
},
"repository": "https://github.com/hcengineering/platform",
"publishConfig": {
"registry": "https://npm.pkg.github.com"
}
}


@ -25,9 +25,9 @@ import {
type ImportProject,
type ImportProjectType
} from '../importer/importer'
import { type FileUploader } from '../importer/uploader'
import { type Logger } from '../importer/logger'
import { BaseMarkdownPreprocessor } from '../importer/preprocessor'
import { type FileUploader } from '../importer/uploader'
interface ClickupTask {
'Task ID': string
'Task Name': string
@ -86,7 +86,8 @@ class ClickupImporter {
constructor (
private readonly client: TxOperations,
private readonly fileUploader: FileUploader
private readonly fileUploader: FileUploader,
private readonly logger: Logger
) {}
async importClickUpTasks (file: string): Promise<void> {
@ -102,13 +103,13 @@ class ClickupImporter {
spaces
}
console.log('========================================')
console.log('IMPORT DATA STRUCTURE: ', JSON.stringify(importData, null, 4))
console.log('========================================')
this.logger.log('========================================')
this.logger.log('IMPORT DATA STRUCTURE: ', JSON.stringify(importData, null, 4))
this.logger.log('========================================')
const postprocessor = new ClickupMarkdownPreprocessor(this.personsByName)
await new WorkspaceImporter(this.client, this.fileUploader, importData, postprocessor).performImport()
console.log('========================================')
console.log('IMPORT SUCCESS ')
await new WorkspaceImporter(this.client, this.logger, this.fileUploader, importData, postprocessor).performImport()
this.logger.log('========================================')
this.logger.log('IMPORT SUCCESS ')
}
private async processTasksCsv (file: string, process: (json: ClickupTask) => Promise<void> | void): Promise<void> {
@ -136,8 +137,8 @@ class ClickupImporter {
statuses.add(clickupTask.Status)
})
console.log(projects)
console.log(statuses)
this.logger.log('Projects: ' + JSON.stringify(projects))
this.logger.log('Statuses: ' + JSON.stringify(statuses))
const importProjectType = this.createClickupProjectType(Array.from(statuses))


@ -18,8 +18,9 @@ import contact, { type Person, type PersonAccount } from '@hcengineering/contact
import { type Class, type Doc, generateId, type Ref, type Space, type TxOperations } from '@hcengineering/core'
import document, { type Document } from '@hcengineering/document'
import { MarkupMarkType, type MarkupNode, MarkupNodeType, traverseNode, traverseNodeMarks } from '@hcengineering/text'
import tracker, { type Issue } from '@hcengineering/tracker'
import tracker, { type Issue, Project } from '@hcengineering/tracker'
import * as fs from 'fs'
import sizeOf from 'image-size'
import * as yaml from 'js-yaml'
import { contentType } from 'mime-types'
import * as path from 'path'
@ -28,6 +29,7 @@ import {
type ImportAttachment,
type ImportComment,
type ImportDocument,
ImportDrawing,
type ImportIssue,
type ImportProject,
type ImportProjectType,
@ -35,12 +37,14 @@ import {
type ImportWorkspace,
WorkspaceImporter
} from '../importer/importer'
import { type Logger } from '../importer/logger'
import { BaseMarkdownPreprocessor } from '../importer/preprocessor'
import { type FileUploader } from '../importer/uploader'
interface UnifiedComment {
author: string
text: string
attachments?: string[]
}
interface UnifiedIssueHeader {
@ -59,14 +63,17 @@ interface UnifiedSpaceSettings {
title: string
private?: boolean
autoJoin?: boolean
archived?: boolean
owners?: string[]
members?: string[]
description?: string
emoji?: string
}
interface UnifiedProjectSettings extends UnifiedSpaceSettings {
class: 'tracker:class:Project'
identifier: string
id?: 'tracker:project:DefaultProject'
projectType?: string
defaultIssueStatus?: string
}
@ -97,6 +104,7 @@ interface UnifiedWorkspaceSettings {
class HulyMarkdownPreprocessor extends BaseMarkdownPreprocessor {
constructor (
private readonly urlProvider: (id: string) => string,
private readonly logger: Logger,
private readonly metadataByFilePath: Map<string, DocMetadata>,
private readonly metadataById: Map<Ref<Doc>, DocMetadata>,
private readonly attachMetadataByPath: Map<string, AttachmentMetadata>,
@ -130,7 +138,7 @@ class HulyMarkdownPreprocessor extends BaseMarkdownPreprocessor {
const attachmentMeta = this.attachMetadataByPath.get(fullPath)
if (attachmentMeta === undefined) {
console.warn(`Attachment image not found for ${fullPath}`)
this.logger.error(`Attachment image not found for ${fullPath}`)
return
}
@ -160,7 +168,7 @@ class HulyMarkdownPreprocessor extends BaseMarkdownPreprocessor {
this.updateAttachmentMetadata(fullPath, attachmentMeta, id, spaceId, sourceMeta)
}
} else {
console.log('Unknown link type, leave it as is:', href)
this.logger.log('Unknown link type, leave it as is: ' + href)
}
})
}
@ -218,7 +226,7 @@ class HulyMarkdownPreprocessor extends BaseMarkdownPreprocessor {
private getSourceMetadata (id: Ref<Doc>): DocMetadata | null {
const sourceMeta = this.metadataById.get(id)
if (sourceMeta == null) {
console.warn(`Source metadata not found for ${id}`)
this.logger.error(`Source metadata not found for ${id}`)
return null
}
return sourceMeta
@ -259,56 +267,118 @@ interface AttachmentMetadata {
export class UnifiedFormatImporter {
private readonly metadataById = new Map<Ref<Doc>, DocMetadata>()
private readonly metadataByFilePath = new Map<string, DocMetadata>()
private readonly attachMetadataByPath = new Map<string, AttachmentMetadata>()
private readonly fileMetadataByPath = new Map<string, AttachmentMetadata>()
private personsByName = new Map<string, Ref<Person>>()
private accountsByEmail = new Map<string, Ref<PersonAccount>>()
constructor (
private readonly client: TxOperations,
private readonly fileUploader: FileUploader
private readonly fileUploader: FileUploader,
private readonly logger: Logger
) {}
async importFolder (folderPath: string): Promise<void> {
await this.cachePersonsByNames()
await this.cacheAccountsByEmails()
await this.collectFileMetadata(folderPath)
const workspaceData = await this.processImportFolder(folderPath)
console.log('========================================')
console.log('IMPORT DATA STRUCTURE: ', JSON.stringify(workspaceData, null, 4))
console.log('========================================')
this.logger.log('========================================')
this.logger.log('IMPORT DATA STRUCTURE: ' + JSON.stringify(workspaceData))
this.logger.log('========================================')
console.log('Importing documents...')
this.logger.log('Importing documents...')
const preprocessor = new HulyMarkdownPreprocessor(
this.fileUploader.getFileUrl,
this.logger,
this.metadataByFilePath,
this.metadataById,
this.attachMetadataByPath,
this.fileMetadataByPath,
this.personsByName
)
await new WorkspaceImporter(this.client, this.fileUploader, workspaceData, preprocessor).performImport()
await new WorkspaceImporter(
this.client,
this.logger,
this.fileUploader,
workspaceData,
preprocessor
).performImport()
console.log('Importing attachments...')
const attachments: ImportAttachment[] = Array.from(this.attachMetadataByPath.values())
.filter((attachment) => attachment.parentId !== undefined)
.map((attachment) => {
return {
id: attachment.id,
title: path.basename(attachment.path),
blobProvider: async () => {
const data = fs.readFileSync(attachment.path)
return new Blob([data])
},
parentId: attachment.parentId,
parentClass: attachment.parentClass,
spaceId: attachment.spaceId
this.logger.log('Importing attachments...')
const attachments: ImportAttachment[] = await Promise.all(
Array.from(this.fileMetadataByPath.values())
.filter((attachMeta) => attachMeta.parentId !== undefined)
.map(async (attachMeta: AttachmentMetadata) => await this.processAttachment(attachMeta))
)
await new WorkspaceImporter(this.client, this.logger, this.fileUploader, { attachments }).performImport()
this.logger.log('========================================')
this.logger.log('IMPORT SUCCESS')
}
private async processAttachment (attachMeta: AttachmentMetadata): Promise<ImportAttachment> {
const fileType = contentType(attachMeta.name)
const attachment: ImportAttachment = {
id: attachMeta.id,
title: path.basename(attachMeta.path),
blobProvider: async () => {
const data = fs.readFileSync(attachMeta.path)
const props = fileType !== false ? { type: fileType } : undefined
return new Blob([data], props)
},
parentId: attachMeta.parentId,
parentClass: attachMeta.parentClass,
spaceId: attachMeta.spaceId
}
if (fileType !== false && fileType?.startsWith('image/')) {
try {
const imageDimensions = sizeOf(attachMeta.path)
attachment.metadata = {
originalWidth: imageDimensions.width ?? 0,
originalHeight: imageDimensions.height ?? 0
}
})
await new WorkspaceImporter(this.client, this.fileUploader, { attachments }).performImport()
} catch (error) {
this.logger.error(`Failed to get image dimensions: ${attachMeta.path}`, error)
}
console.log('========================================')
console.log('IMPORT SUCCESS')
const pathDetails = path.parse(attachMeta.path)
const childrenDir = path.join(pathDetails.dir, pathDetails.name.replace(pathDetails.ext, ''))
if (fs.existsSync(childrenDir) && fs.statSync(childrenDir).isDirectory()) {
attachment.drawings = await this.processDrawings(childrenDir)
}
}
return attachment
}
private async processDrawings (folderPath: string): Promise<ImportDrawing[]> {
this.logger.log(`Processing drawings in ${folderPath}...`)
const entries = fs.readdirSync(folderPath, { withFileTypes: true })
const drawings: ImportDrawing[] = []
for (const entry of entries) {
const fullPath = path.join(folderPath, entry.name)
if (entry.isFile() && entry.name.endsWith('.json')) {
const content = fs.readFileSync(fullPath, 'utf8')
const json = JSON.parse(content)
if (json.class !== 'drawing:class:Drawing') {
this.logger.log(`Skipping ${fullPath}: not a drawing`)
continue
}
drawings.push({
contentProvider: async () => {
return JSON.stringify(json.content)
}
})
}
}
return drawings
}
private async processImportFolder (folderPath: string): Promise<ImportWorkspace> {
@ -336,11 +406,11 @@ export class UnifiedFormatImporter {
const spacePath = path.join(folderPath, spaceName)
try {
console.log(`Processing ${spaceName}...`)
this.logger.log(`Processing ${spaceName}...`)
const spaceConfig = yaml.load(fs.readFileSync(yamlPath, 'utf8')) as UnifiedSpaceSettings
if (spaceConfig.class === undefined) {
console.warn(`Skipping ${spaceName}: not a space - no class specified`)
if (spaceConfig?.class === undefined) {
this.logger.error(`Skipping ${spaceName}: not a space - no class specified`)
continue
}
@ -373,8 +443,6 @@ export class UnifiedFormatImporter {
}
}
await this.processAttachments(folderPath)
return builder.build()
}
@ -392,7 +460,7 @@ export class UnifiedFormatImporter {
const issueHeader = (await this.readYamlHeader(issuePath)) as UnifiedIssueHeader
if (issueHeader.class === undefined) {
console.warn(`Skipping ${issueFile}: not an issue`)
this.logger.error(`Skipping ${issueFile}: not an issue`)
continue
}
@ -420,7 +488,7 @@ export class UnifiedFormatImporter {
priority: issueHeader.priority,
estimation: issueHeader.estimation,
remainingTime: issueHeader.remainingTime,
comments: this.processComments(issueHeader.comments),
comments: await this.processComments(currentPath, issueHeader.comments),
subdocs: [], // Will be added via builder
assignee: this.findPersonByName(issueHeader.assignee)
}
@ -470,7 +538,7 @@ export class UnifiedFormatImporter {
const docHeader = (await this.readYamlHeader(docPath)) as UnifiedDocumentHeader
if (docHeader.class === undefined) {
console.warn(`Skipping ${docFile}: not a document`)
this.logger.error(`Skipping ${docFile}: not a document`)
continue
}
@ -506,13 +574,27 @@ export class UnifiedFormatImporter {
}
}
private processComments (comments: UnifiedComment[] = []): ImportComment[] {
return comments.map((comment) => {
return {
text: comment.text,
author: this.findAccountByEmail(comment.author)
}
})
private processComments (currentPath: string, comments: UnifiedComment[] = []): Promise<ImportComment[]> {
return Promise.all(
comments.map(async (comment) => {
const attachments: ImportAttachment[] = []
if (comment.attachments !== undefined) {
for (const attachmentPath of comment.attachments) {
const fullPath = path.resolve(currentPath, attachmentPath)
const attachmentMeta = this.fileMetadataByPath.get(fullPath)
if (attachmentMeta !== undefined) {
const importAttachment = await this.processAttachment(attachmentMeta)
attachments.push(importAttachment)
}
}
}
return {
text: comment.text,
author: this.findAccountByEmail(comment.author),
attachments
}
})
)
}
private processProjectTypes (wsHeader: UnifiedWorkspaceSettings): ImportProjectType[] {
@ -534,11 +616,14 @@ export class UnifiedFormatImporter {
private async processProject (projectHeader: UnifiedProjectSettings): Promise<ImportProject> {
return {
class: tracker.class.Project,
id: projectHeader.id as Ref<Project>,
title: projectHeader.title,
identifier: projectHeader.identifier,
private: projectHeader.private ?? false,
autoJoin: projectHeader.autoJoin ?? true,
archived: projectHeader.archived ?? false,
description: projectHeader.description,
emoji: projectHeader.emoji,
defaultIssueStatus:
projectHeader.defaultIssueStatus !== undefined ? { name: projectHeader.defaultIssueStatus } : undefined,
owners:
@ -555,7 +640,9 @@ export class UnifiedFormatImporter {
title: spaceHeader.title,
private: spaceHeader.private ?? false,
autoJoin: spaceHeader.autoJoin ?? true,
archived: spaceHeader.archived ?? false,
description: spaceHeader.description,
emoji: spaceHeader.emoji,
owners: spaceHeader.owners !== undefined ? spaceHeader.owners.map((email) => this.findAccountByEmail(email)) : [],
members:
spaceHeader.members !== undefined ? spaceHeader.members.map((email) => this.findAccountByEmail(email)) : [],
@ -564,7 +651,7 @@ export class UnifiedFormatImporter {
}
private async readYamlHeader (filePath: string): Promise<any> {
console.log('Read YAML header from: ', filePath)
this.logger.log('Read YAML header from: ' + filePath)
const content = fs.readFileSync(filePath, 'utf8')
const match = content.match(/^---\n([\s\S]*?)\n---/)
if (match != null) {
@ -601,7 +688,7 @@ export class UnifiedFormatImporter {
}, new Map())
}
private async processAttachments (folderPath: string): Promise<void> {
private async collectFileMetadata (folderPath: string): Promise<void> {
const processDir = async (dir: string): Promise<void> => {
const entries = fs.readdirSync(dir, { withFileTypes: true })
@ -611,11 +698,8 @@ export class UnifiedFormatImporter {
if (entry.isDirectory()) {
await processDir(fullPath)
} else if (entry.isFile()) {
// Skip files that are already processed as documents or issues
if (!this.metadataByFilePath.has(fullPath)) {
const attachmentId = generateId<Attachment>()
this.attachMetadataByPath.set(fullPath, { id: attachmentId, name: entry.name, path: fullPath })
}
const attachmentId = generateId<Attachment>()
this.fileMetadataByPath.set(fullPath, { id: attachmentId, name: entry.name, path: fullPath })
}
}
}


@ -256,6 +256,18 @@ export class ImportWorkspaceBuilder {
errors.push('defaultIssueStatus not found: ' + project.defaultIssueStatus.name)
}
if (project.id !== undefined && project.id !== tracker.project.DefaultProject) {
errors.push('update operation is only allowed for tracker:project:DefaultProject')
}
if (project.archived !== undefined) {
errors.push(...this.validateType(project.archived, 'boolean', 'archived'))
}
if (project.emoji !== undefined) {
errors.push(...this.validateEmoji(project.emoji))
}
errors.push(...this.validateProjectIdentifier(project.identifier))
return errors
}
@ -303,9 +315,22 @@ export class ImportWorkspaceBuilder {
errors.push(...this.validateType(teamspace.description, 'string', 'description'))
}
if (teamspace.archived !== undefined) {
errors.push(...this.validateType(teamspace.archived, 'boolean', 'archived'))
}
if (teamspace.emoji !== undefined) {
errors.push(...this.validateType(teamspace.emoji, 'string', 'emoji'))
}
if (teamspace.emoji !== undefined) {
errors.push(...this.validateEmoji(teamspace.emoji))
}
if (!this.validateStringDefined(teamspace.title)) {
errors.push('title is required')
}
if (teamspace.class !== document.class.Teamspace) {
errors.push('invalid class: ' + teamspace.class)
}
@ -446,6 +471,14 @@ export class ImportWorkspaceBuilder {
issue.subdocs = childIssues
}
private validateEmoji (emoji: string): string[] {
const errors: string[] = []
if (typeof emoji === 'string' && emoji.codePointAt(0) == null) {
errors.push('Invalid emoji: ' + emoji)
}
return errors
}
private validateType (value: unknown, type: 'string' | 'number' | 'boolean', fieldName: string): string[] {
const errors: string[] = []
switch (type) {


@ -13,19 +13,27 @@
// limitations under the License.
//
import {
type Ref,
type Blob as PlatformBlob,
type CollaborativeDoc,
concatLink,
makeCollabJsonId
makeCollabJsonId,
Markup,
type Blob as PlatformBlob,
type Ref
} from '@hcengineering/core'
import { FileUploader, UploadResult } from './uploader'
export interface FileUploader {
uploadFile: (name: string, file: Blob) => Promise<Ref<PlatformBlob>>
uploadCollaborativeDoc: (collabId: CollaborativeDoc, data: Buffer) => Promise<Ref<PlatformBlob>>
getFileUrl: (id: string) => string
interface FileUploadError {
key: string
error: string
}
interface FileUploadSuccess {
key: string
id: string
}
type FileUploadResult = FileUploadSuccess | FileUploadError
export class FrontFileUploader implements FileUploader {
constructor (
private readonly frontUrl: string,
@ -35,11 +43,11 @@ export class FrontFileUploader implements FileUploader {
this.getFileUrl = this.getFileUrl.bind(this)
}
public async uploadFile (name: string, file: Blob): Promise<Ref<PlatformBlob>> {
public async uploadFile (name: string, blob: Blob): Promise<UploadResult> {
const form = new FormData()
form.append('file', file, name)
form.append('file', blob, name)
const res = await fetch(concatLink(this.frontUrl, '/files'), {
const response = await fetch(concatLink(this.frontUrl, '/files'), {
method: 'POST',
headers: {
Authorization: 'Bearer ' + this.token
@ -47,20 +55,36 @@ export class FrontFileUploader implements FileUploader {
body: form
})
if (res.ok && res.status === 200) {
return name as Ref<PlatformBlob>
if (response.status !== 200) {
return { success: false, error: response.statusText }
}
throw new Error('Failed to upload file')
const responseText = await response.text()
if (responseText === undefined) {
return { success: false, error: response.statusText }
}
const uploadResult = JSON.parse(responseText) as FileUploadResult[]
if (!Array.isArray(uploadResult) || uploadResult.length === 0) {
return { success: false, error: response.statusText }
}
const result = uploadResult[0]
if ('error' in result) {
return { success: false, error: result.error }
}
return { success: true, id: result.id as Ref<PlatformBlob> }
}
public getFileUrl (id: string): string {
return concatLink(this.frontUrl, `/files/${this.workspaceId}/${id}?file=${id}&workspace=${this.workspaceId}`)
}
public async uploadCollaborativeDoc (collabId: CollaborativeDoc, data: Buffer): Promise<Ref<PlatformBlob>> {
public async uploadCollaborativeDoc (collabId: CollaborativeDoc, content: Markup): Promise<UploadResult> {
const buffer = Buffer.from(content)
const blobId = makeCollabJsonId(collabId)
const blob = new Blob([data], { type: 'application/json' })
const blob = new Blob([buffer], { type: 'application/json' })
return await this.uploadFile(blobId, blob)
}
}


@ -12,7 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
//
import attachment, { type Attachment } from '@hcengineering/attachment'
import attachment, { Drawing, type Attachment } from '@hcengineering/attachment'
import chunter, { type ChatMessage } from '@hcengineering/chunter'
import { type Person } from '@hcengineering/contact'
import core, {
@ -51,8 +51,10 @@ import tracker, {
type Project,
TimeReportDayType
} from '@hcengineering/tracker'
import view from '@hcengineering/view'
import { type MarkdownPreprocessor, NoopMarkdownPreprocessor } from './preprocessor'
import { type FileUploader } from './uploader'
import { Logger } from './logger'
export interface ImportWorkspace {
projectTypes?: ImportProjectType[]
@ -82,7 +84,9 @@ export interface ImportSpace<T extends ImportDoc> {
title: string
private: boolean
autoJoin?: boolean
archived?: boolean
description?: string
emoji?: string
owners?: Ref<Account>[]
members?: Ref<Account>[]
docs: T[]
@ -107,6 +111,7 @@ export interface ImportDocument extends ImportDoc {
export interface ImportProject extends ImportSpace<ImportIssue> {
class: Ref<Class<Project>>
id?: Ref<Project>
identifier: string
projectType?: ImportProjectType
defaultIssueStatus?: ImportStatus
@ -139,6 +144,17 @@ export interface ImportAttachment {
parentId?: Ref<Doc>
parentClass?: Ref<Class<Doc<Space>>>
spaceId?: Ref<Space>
metadata?: ImportImageMetadata
drawings?: ImportDrawing[]
}
export interface ImportImageMetadata {
originalWidth: number
originalHeight: number
}
export interface ImportDrawing {
contentProvider: () => Promise<string>
}
export class WorkspaceImporter {
@ -147,6 +163,7 @@ export class WorkspaceImporter {
constructor (
private readonly client: TxOperations,
private readonly logger: Logger,
private readonly fileUploader: FileUploader,
private readonly workspaceData: ImportWorkspace,
private readonly preprocessor: MarkdownPreprocessor = new NoopMarkdownPreprocessor()
@ -234,9 +251,9 @@ export class WorkspaceImporter {
}
async importTeamspace (space: ImportTeamspace): Promise<Ref<Teamspace>> {
console.log('Creating teamspace: ', space.title)
this.logger.log('Creating teamspace: ' + space.title)
const teamspaceId = await this.createTeamspace(space)
console.log('Teamspace created: ', teamspaceId)
this.logger.log('Teamspace created: ' + teamspaceId)
for (const doc of space.docs) {
await this.createDocumentWithSubdocs(doc, document.ids.NoParent, teamspaceId)
}
@ -248,9 +265,9 @@ export class WorkspaceImporter {
parentId: Ref<Document>,
teamspaceId: Ref<Teamspace>
): Promise<Ref<Document>> {
console.log('Creating document: ', doc.title)
this.logger.log('Creating document: ' + doc.title)
const documentId = await this.createDocument(doc, parentId, teamspaceId)
console.log('Document created: ', documentId)
this.logger.log('Document created: ' + documentId)
for (const child of doc.subdocs) {
await this.createDocumentWithSubdocs(child, documentId, teamspaceId)
}
@ -259,16 +276,19 @@ export class WorkspaceImporter {
async createTeamspace (space: ImportTeamspace): Promise<Ref<Teamspace>> {
const teamspaceId = generateId<Teamspace>()
const codePoint = space.emoji?.codePointAt(0)
const data = {
type: document.spaceType.DefaultTeamspaceType,
description: space.description ?? '',
title: space.title,
name: space.title,
private: space.private,
color: codePoint,
icon: codePoint === undefined ? undefined : view.ids.IconWithEmoji,
owners: space.owners ?? [],
members: space.members ?? [],
autoJoin: space.autoJoin,
archived: false
archived: space.archived ?? false
}
await this.client.createDoc(document.class.Teamspace, core.space.Space, data, teamspaceId)
return teamspaceId
@ -287,7 +307,7 @@ export class WorkspaceImporter {
const lastRank = await getFirstRank(this.client, teamspaceId, parentId)
const rank = makeRank(lastRank, undefined)
const attachedData: Data<Document> = {
const data: Data<Document> = {
title: doc.title,
content: contentId,
parent: parentId,
@ -299,14 +319,22 @@ export class WorkspaceImporter {
rank
}
await this.client.createDoc(document.class.Document, teamspaceId, attachedData, id)
await this.client.createDoc(document.class.Document, teamspaceId, data, id)
return id
}
async importProject (project: ImportProject): Promise<Ref<Project>> {
console.log('Creating project: ', project.title)
const projectId = await this.createProject(project)
console.log('Project created: ' + projectId)
let projectId: Ref<Project>
if (project.id === tracker.project.DefaultProject) {
this.logger.log('Setting up default project: ' + project.title)
projectId = tracker.project.DefaultProject
await this.updateProject(projectId, project)
this.logger.log('Default project updated: ' + projectId)
} else {
this.logger.log('Creating project: ', project.title)
projectId = await this.createProject(project)
this.logger.log('Project created: ' + projectId)
}
const projectDoc = await this.client.findOne(tracker.class.Project, { _id: projectId })
if (projectDoc === undefined) {
@ -314,7 +342,7 @@ export class WorkspaceImporter {
}
for (const issue of project.docs) {
await this.createIssueWithSubissues(issue, tracker.ids.NoParent, projectDoc, [])
await this.createIssueWithSubissues(issue, tracker.ids.NoParent, projectDoc, projectId, [])
}
return projectId
}
@ -323,31 +351,69 @@ export class WorkspaceImporter {
issue: ImportIssue,
parentId: Ref<Issue>,
project: Project,
spaceId: Ref<Project>,
parentsInfo: IssueParentInfo[]
): Promise<{ id: Ref<Issue>, identifier: string }> {
console.log('Creating issue: ', issue.title)
const issueResult = await this.createIssue(issue, project, parentId, parentsInfo)
console.log('Issue created: ', issueResult)
this.logger.log('Creating issue: ' + issue.title)
const issueResult = await this.createIssue(issue, project, parentId, spaceId, parentsInfo)
this.logger.log('Issue created: ' + JSON.stringify(issueResult))
if (issue.subdocs.length > 0) {
const parentsInfoEx = [
{
parentId: issueResult.id,
parentTitle: issue.title,
space: project._id,
space: spaceId,
identifier: issueResult.identifier
},
...parentsInfo
]
for (const child of issue.subdocs) {
await this.createIssueWithSubissues(child as ImportIssue, issueResult.id, project, parentsInfoEx)
await this.createIssueWithSubissues(child as ImportIssue, issueResult.id, project, spaceId, parentsInfoEx)
}
}
return issueResult
}
async updateProject (projectId: Ref<Project>, project: ImportProject): Promise<Ref<Project>> {
const oldProject = await this.client.findOne(tracker.class.Project, { _id: projectId })
if (oldProject === undefined) {
throw new Error('Project not found: ' + projectId)
}
const maxIssueNumber = this.getMaxImportIssueNumber(project)
const codePoint = project.emoji?.codePointAt(0)
const projectData = {
name: project.title,
private: project.private,
description: project.description ?? oldProject.description,
members: project.members ?? oldProject.members,
owners: project.owners ?? oldProject.owners,
archived: project.archived ?? oldProject.archived,
autoJoin: project.autoJoin ?? oldProject.autoJoin,
identifier:
project.identifier !== undefined
? await this.uniqueProjectIdentifier(project.identifier)
: oldProject.identifier,
sequence: Math.max(oldProject.sequence, maxIssueNumber),
color: codePoint ?? oldProject.color,
icon: codePoint === undefined ? undefined : view.ids.IconWithEmoji,
defaultIssueStatus:
project.defaultIssueStatus !== undefined
? this.issueStatusByName.get(project.defaultIssueStatus.name)
: oldProject.defaultIssueStatus,
defaultTimeReportDay: oldProject.defaultTimeReportDay,
type:
project.projectType !== undefined
? this.projectTypeByName.get(project.projectType.name) ?? tracker.ids.ClassingProjectType
: oldProject.type
}
await this.client.updateDoc(tracker.class.Project, core.space.Space, projectId, projectData)
return projectId
}
async createProject (project: ImportProject): Promise<Ref<Project>> {
const projectId = generateId<Project>()
@ -362,6 +428,7 @@ export class WorkspaceImporter {
: tracker.status.Backlog
const identifier = await this.uniqueProjectIdentifier(project.identifier)
const codePoint = project.emoji?.codePointAt(0)
const projectData = {
name: project.title,
description: project.description ?? '',
@ -371,7 +438,9 @@ export class WorkspaceImporter {
archived: false,
autoJoin: project.autoJoin,
identifier,
sequence: 0,
sequence: this.getMaxImportIssueNumber(project),
color: codePoint,
icon: codePoint != null ? view.ids.IconWithEmoji : undefined,
defaultIssueStatus: defaultIssueStatus ?? tracker.status.Backlog,
defaultTimeReportDay: TimeReportDayType.PreviousWorkDay,
type: projectType as Ref<ProjectType>
@ -384,24 +453,30 @@ export class WorkspaceImporter {
return projectId
}
private getMaxImportIssueNumber (project: ImportProject): number {
const maxIssueNumber = Math.max(...project.docs.map((doc) => doc.number ?? 0))
return maxIssueNumber
}
async createIssue (
issue: ImportIssue,
project: Project,
parentId: Ref<Issue>,
spaceId: Ref<Project>,
parentsInfo: IssueParentInfo[]
): Promise<{ id: Ref<Issue>, identifier: string }> {
const issueId = issue.id ?? generateId<Issue>()
const content = await issue.descrProvider()
const collabId = makeCollabId(tracker.class.Issue, issueId, 'description')
const contentId = await this.createCollaborativeContent(issueId, collabId, content, project._id)
const contentId = await this.createCollaborativeContent(issueId, collabId, content, spaceId)
const { number, identifier } =
issue.number !== undefined
? { number: issue.number, identifier: `${project.identifier}-${issue.number}` }
: await this.getNextIssueIdentifier(project)
: await this.getNextIssueIdentifier(project, spaceId)
const kind = await this.getIssueKind(project)
const rank = await this.getIssueRank(project)
const rank = await this.getIssueRank(project, spaceId)
const status = await this.findIssueStatusByName(issue.status.name)
const priority =
issue.priority !== undefined
@ -436,7 +511,7 @@ export class WorkspaceImporter {
await this.client.addCollection(
tracker.class.Issue,
project._id,
spaceId,
parentId,
tracker.class.Issue,
'subIssues',
@ -445,16 +520,19 @@ export class WorkspaceImporter {
)
if (issue.comments !== undefined) {
await this.importComments(issueId, issue.comments, project._id)
await this.importComments(issueId, issue.comments, spaceId)
}
return { id: issueId, identifier }
}
private async getNextIssueIdentifier (project: Project): Promise<{ number: number, identifier: string }> {
private async getNextIssueIdentifier (
project: Project,
spaceId: Ref<Project>
): Promise<{ number: number, identifier: string }> {
const incResult = await this.client.updateDoc(
tracker.class.Project,
core.space.Space,
project._id,
spaceId,
{ $inc: { sequence: 1 } },
true
)
@ -467,15 +545,15 @@ export class WorkspaceImporter {
const taskKind = project?.type !== undefined ? { parent: project.type } : {}
const kind = await this.client.findOne(task.class.TaskType, taskKind)
if (kind === undefined) {
throw new Error(`Task type not found for project: ${project._id}`)
throw new Error(`Task type not found for project: ${project.name}`)
}
return kind
}
private async getIssueRank (project: Project): Promise<string> {
private async getIssueRank (project: Project, spaceId: Ref<Project>): Promise<string> {
const lastIssue = await this.client.findOne<Issue>(
tracker.class.Issue,
{ space: project._id },
{ space: spaceId },
{ sort: { rank: SortingOrder.Descending } }
)
return makeRank(lastIssue?.rank, undefined)
@ -529,28 +607,44 @@ export class WorkspaceImporter {
): Promise<void> {
const blob = await attachment.blobProvider()
if (blob === null) {
console.warn('Failed to read attachment file: ', attachment.title)
this.logger.error('Failed to read attachment file: ' + attachment.title)
return
}
const file = new File([blob], attachment.title)
const file = new File([blob], attachment.title, { type: blob.type })
try {
await this.createAttachment(attachment.id ?? generateId<Attachment>(), file, spaceId, parentId, parentClass)
const blobId = await this.createAttachment(
attachment.id ?? generateId<Attachment>(),
spaceId,
parentId,
parentClass,
file,
attachment.metadata
)
if (attachment.drawings !== undefined) {
for (const drawing of attachment.drawings) {
await this.createDrawing(blobId, drawing, spaceId)
}
}
} catch {
console.warn('Failed to upload attachment file: ', attachment.title)
this.logger.error('Failed to upload attachment file: ', attachment.title)
}
}
private async createAttachment (
id: Ref<Attachment>,
file: File,
spaceId: Ref<Space>,
parentId: Ref<Doc>,
parentClass: Ref<Class<Doc<Space>>>
): Promise<Ref<Attachment>> {
const attachmentId = generateId<Attachment>()
const blobId = await this.fileUploader.uploadFile(id, file)
parentClass: Ref<Class<Doc<Space>>>,
file: File,
metadata?: ImportImageMetadata
): Promise<Ref<PlatformBlob>> {
const uploadResult = await this.fileUploader.uploadFile(id, file)
if (!uploadResult.success) {
throw new Error('Failed to upload attachment file: ' + file.name)
}
await this.client.addCollection(
attachment.class.Attachment,
spaceId,
@ -558,15 +652,31 @@ export class WorkspaceImporter {
parentClass,
'attachments',
{
file: blobId,
file: uploadResult.id,
lastModified: Date.now(),
name: file.name,
size: file.size,
type: file.type
type: file.type,
metadata
},
id
)
return attachmentId
return uploadResult.id
}
private async createDrawing (
blobId: Ref<PlatformBlob>,
drawing: ImportDrawing,
spaceId: Ref<Space>
): Promise<Ref<Drawing>> {
const id = generateId<Drawing>()
const data: Data<Drawing> = {
parent: blobId,
parentClass: core.class.Blob,
content: await drawing.contentProvider()
}
await this.client.createDoc(attachment.class.Drawing, spaceId, data, id)
return id
}
// Collaborative content handling
@ -580,9 +690,12 @@ export class WorkspaceImporter {
const processedJson = this.preprocessor.process(json, id, spaceId)
const markup = jsonToMarkup(processedJson)
const buffer = Buffer.from(markup)
return await this.fileUploader.uploadCollaborativeDoc(collabId, buffer)
const result = await this.fileUploader.uploadCollaborativeDoc(collabId, markup)
if (result.success) {
return result.id
}
throw new Error('Failed to upload collaborative document: ' + id)
}
async findIssueStatusByName (name: string): Promise<Ref<IssueStatus>> {


@ -0,0 +1,18 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
export interface Logger {
log: (msg: string, data?: any) => void
error: (msg: string, data?: any) => void
}


@ -0,0 +1,59 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import { saveCollabJson } from '@hcengineering/collaboration'
import {
CollaborativeDoc,
Markup,
MeasureContext,
Blob as PlatformBlob,
Ref,
WorkspaceIdWithUrl
} from '@hcengineering/core'
import type { StorageAdapter } from '@hcengineering/server-core'
import { FileUploader, UploadResult } from './uploader'
export class StorageFileUploader implements FileUploader {
constructor (
private readonly ctx: MeasureContext,
private readonly storageAdapter: StorageAdapter,
private readonly wsUrl: WorkspaceIdWithUrl
) {
this.uploadFile = this.uploadFile.bind(this)
}
public async uploadFile (id: string, blob: Blob): Promise<UploadResult> {
try {
const arrayBuffer = await blob.arrayBuffer()
const buffer = Buffer.from(arrayBuffer)
await this.storageAdapter.put(this.ctx, this.wsUrl, id, buffer, blob.type, buffer.byteLength)
return { success: true, id: id as Ref<PlatformBlob> }
} catch (error) {
return { success: false, error: error instanceof Error ? error.message : String(error) }
}
}
public async uploadCollaborativeDoc (collabId: CollaborativeDoc, content: Markup): Promise<UploadResult> {
try {
const blobId = await saveCollabJson(this.ctx, this.storageAdapter, this.wsUrl, collabId, content)
return { success: true, id: blobId }
} catch (error) {
return { success: false, error: error instanceof Error ? error.message : String(error) }
}
}
public getFileUrl (id: string): string {
return ''
}
}
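
A usage sketch, assuming a MeasureContext, StorageAdapter and WorkspaceIdWithUrl are already available (as they are in the workspace initializer further down); the blob id and content are placeholders.

// Sketch only: ctx, storageAdapter and wsUrl are assumed to come from the caller.
const uploader = new StorageFileUploader(ctx, storageAdapter, wsUrl)
const result = await uploader.uploadFile('readme-blob', new Blob(['hello'], { type: 'text/plain' }))
if (result.success) {
  console.log('stored blob', result.id)
} else {
  console.error('upload failed', result.error)
}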

View File

@@ -0,0 +1,33 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import { type CollaborativeDoc, type Blob as PlatformBlob, type Markup, type Ref } from '@hcengineering/core'
export interface SuccessUploadResult {
success: true
id: Ref<PlatformBlob>
}
export interface FailureUploadResult {
success: false
error: string
}
export type UploadResult = SuccessUploadResult | FailureUploadResult
export interface FileUploader {
uploadFile: (name: string, blob: Blob) => Promise<UploadResult>
uploadCollaborativeDoc: (collabId: CollaborativeDoc, content: Markup) => Promise<UploadResult>
getFileUrl: (id: string) => string
}
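
The success/failure split is a plain discriminated union, so callers can narrow on the success flag instead of wrapping every upload in try/catch. A small helper illustrating this pattern follows; the helper name and the relative import path are assumptions.

import { type UploadResult } from './uploader' // path is an assumption

// Sketch only: rethrow the uploader's error message on failure, otherwise return the blob id.
function unwrapUpload (result: UploadResult) {
  if (result.success) {
    return result.id
  }
  throw new Error(result.error)
}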

View File

@@ -0,0 +1,23 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
export * from './huly/unified'
export * from './clickup/clickup'
export * from './notion/notion'
export * from './importer/uploader'
export * from './importer/storageUploader'
export * from './importer/frontUploader'
export * from './importer/logger'
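
Consumers can then pull everything from the package root; for example, the workspace initializer further down imports the importer and the storage-backed uploader like this (sketch):

// Sketch only: mirrors the import used by the workspace initializer below.
import { StorageFileUploader, UnifiedFormatImporter } from '@hcengineering/importer'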

View File

@@ -441,11 +441,13 @@ async function importPageDocument (
preProcessMarkdown(json, documentMetaMap, fileUploader)
}
const markup = jsonToMarkup(json)
const buffer = Buffer.from(markup)
const id = docMeta.id as Ref<Document>
const collabId = makeCollabId(document.class.Document, id, 'content')
const blobId = await fileUploader.uploadCollaborativeDoc(collabId, buffer)
const uploadResult = await fileUploader.uploadCollaborativeDoc(collabId, markup)
if (!uploadResult.success) {
throw new Error('Failed to upload collaborative document: ' + docMeta.id)
}
const parent = (parentMeta?.id as Ref<Document>) ?? document.ids.NoParent
@@ -454,7 +456,7 @@
const attachedData: Data<Document> = {
title: docMeta.name,
content: blobId,
content: uploadResult.id,
parent,
attachments: 0,
embeddings: 0,

View File

@@ -0,0 +1,10 @@
{
"extends": "./node_modules/@hcengineering/platform-rig/profiles/default/tsconfig.json",
"compilerOptions": {
"rootDir": "./src",
"outDir": "./lib",
"declarationDir": "./types",
"tsBuildInfoFile": ".build/build.tsbuildinfo"
}
}

View File

@@ -14,4 +14,7 @@ ENV MALLOC_CONF=dirty_decay_ms:1000,narenas:2,background_thread:true
COPY bundle/bundle.js ./
COPY bundle/bundle.js.map ./
COPY ini[t]/ ./init-scripts/
CMD [ "node", "bundle.js" ]

View File

@@ -455,6 +455,11 @@
"projectFolder": "packages/api-client",
"shouldPublish": false
},
{
"packageName": "@hcengineering/importer",
"projectFolder": "packages/importer",
"shouldPublish": false
},
{
"packageName": "@hcengineering/collaboration",
"projectFolder": "server/collaboration",

View File

@@ -53,7 +53,6 @@
"koa-router": "^12.0.1",
"koa-bodyparser": "^4.4.1",
"@koa/cors": "^5.0.0",
"@hcengineering/server-tool": "^0.6.0",
"@hcengineering/server-token": "^0.6.11",
"@hcengineering/analytics": "^0.6.0"
}

View File

@@ -17,7 +17,6 @@ import { registerProviders } from '@hcengineering/auth-providers'
import { metricsAggregate, type BrandingMap, type MeasureContext } from '@hcengineering/core'
import platform, { Severity, Status, addStringsLoader, setMetadata } from '@hcengineering/platform'
import serverToken, { decodeToken } from '@hcengineering/server-token'
import toolPlugin from '@hcengineering/server-tool'
import cors from '@koa/cors'
import { type IncomingHttpHeaders } from 'http'
import Koa from 'koa'
@@ -87,11 +86,6 @@ export function serveAccount (measureCtx: MeasureContext, brandings: BrandingMap
setMetadata(serverToken.metadata.Secret, serverSecret)
const initScriptUrl = process.env.INIT_SCRIPT_URL
if (initScriptUrl !== undefined) {
setMetadata(toolPlugin.metadata.InitScriptURL, initScriptUrl)
}
const hasSignUp = process.env.DISABLE_SIGNUP !== 'true'
const methods = getMethods(hasSignUp)

View File

@@ -46,6 +46,7 @@
"@hcengineering/contact": "^0.6.24",
"@hcengineering/client-resources": "^0.6.27",
"@hcengineering/client": "^0.6.18",
"@hcengineering/importer": "^0.6.1",
"@hcengineering/model": "^0.6.11",
"@hcengineering/rank": "^0.6.4",
"uuid": "^8.3.2",

View File

@@ -192,14 +192,20 @@ export async function initializeWorkspace (
progress: (value: number) => Promise<void>
): Promise<void> {
const initWS = branding?.initWorkspace ?? getMetadata(toolPlugin.metadata.InitWorkspace)
const scriptUrl = getMetadata(toolPlugin.metadata.InitScriptURL)
ctx.info('Init script details', { scriptUrl, initWS })
if (initWS === undefined || scriptUrl === undefined) return
const initRepoDir = getMetadata(toolPlugin.metadata.InitRepoDir)
ctx.info('Init script details', { initWS, initRepoDir })
if (initWS === undefined || initRepoDir === undefined) return
const initScriptFile = path.resolve(initRepoDir, 'script.yaml')
if (!fs.existsSync(initScriptFile)) {
ctx.warn('Init script file not found in init directory', { initScriptFile })
return
}
try {
// `https://raw.githubusercontent.com/hcengineering/init/main/script.yaml`
const req = await fetch(scriptUrl)
const text = await req.text()
const text = fs.readFileSync(initScriptFile, 'utf8')
const scripts = yaml.load(text) as any as InitScript[]
let script: InitScript | undefined
if (initWS !== undefined) {
script = scripts.find((it) => it.name === initWS)
@@ -211,7 +217,7 @@
return
}
const initializer = new WorkspaceInitializer(ctx, storageAdapter, wsUrl, client)
const initializer = new WorkspaceInitializer(ctx, storageAdapter, wsUrl, client, initRepoDir)
await initializer.processScript(script, logger, progress)
} catch (err: any) {
ctx.error('Failed to initialize workspace', { error: err })

View File

@@ -15,9 +15,11 @@ import core, {
} from '@hcengineering/core'
import { ModelLogger } from '@hcengineering/model'
import { makeRank } from '@hcengineering/rank'
import { StorageFileUploader, UnifiedFormatImporter } from '@hcengineering/importer'
import type { StorageAdapter } from '@hcengineering/server-core'
import { jsonToMarkup, parseMessageMarkdown } from '@hcengineering/text'
import { v4 as uuid } from 'uuid'
import path from 'path'
const fieldRegexp = /\${\S+?}/
@@ -35,7 +37,7 @@ export type InitStep<T extends Doc> =
| UpdateStep<T>
| FindStep<T>
| UploadStep
| ImportStep
export interface CreateStep<T extends Doc> {
type: 'create'
_class: Ref<Class<T>>
@@ -82,6 +84,11 @@ export interface UploadStep {
resultVariable?: string
}
export interface ImportStep {
type: 'import'
path: string
}
export type Props<T extends Doc> = Data<T> & Partial<Doc> & { space: Ref<Space> }
export class WorkspaceInitializer {
@@ -93,7 +100,8 @@ export class WorkspaceInitializer {
private readonly ctx: MeasureContext,
private readonly storageAdapter: StorageAdapter,
private readonly wsUrl: WorkspaceIdWithUrl,
private readonly client: TxOperations
private readonly client: TxOperations,
private readonly initRepoDir: string
) {}
async processScript (
@@ -118,6 +126,8 @@
await this.processFind(step, vars)
} else if (step.type === 'upload') {
await this.processUpload(step, vars, logger)
} else if (step.type === 'import') {
await this.processImport(step, vars, logger)
}
await progress(Math.round(((index + 1) * 100) / script.steps.length))
@@ -152,6 +162,18 @@
}
}
private async processImport (step: ImportStep, vars: Record<string, any>, logger: ModelLogger): Promise<void> {
try {
const uploader = new StorageFileUploader(this.ctx, this.storageAdapter, this.wsUrl)
const initPath = path.resolve(this.initRepoDir, step.path)
const importer = new UnifiedFormatImporter(this.client, uploader, logger)
await importer.importFolder(initPath)
} catch (error) {
logger.error('Import failed', error)
throw error
}
}
private async processFind<T extends Doc>(step: FindStep<T>, vars: Record<string, any>): Promise<void> {
const query = this.fillProps(step.query, vars)
const res = await this.client.findOne(step._class, { ...(query as any) })
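
For reference, once script.yaml is parsed into InitScript objects, an import step reduces to an object of the shape declared above; the path value here is an assumption, and processImport resolves it against InitRepoDir.

// Sketch only: the path value is a placeholder, resolved against InitRepoDir by processImport.
const step: ImportStep = {
  type: 'import',
  path: 'workspace-content'
}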

View File

@@ -11,7 +11,7 @@ export const toolId = 'tool' as Plugin
const toolPlugin = plugin(toolId, {
metadata: {
InitWorkspace: '' as Metadata<string>,
InitScriptURL: '' as Metadata<string>
InitRepoDir: '' as Metadata<string>
}
})

View File

@@ -82,12 +82,11 @@ export function serveWorkspaceAccount (
if (initWS !== undefined) {
setMetadata(toolPlugin.metadata.InitWorkspace, initWS)
}
const initScriptUrl = process.env.INIT_SCRIPT_URL
if (initScriptUrl !== undefined) {
setMetadata(toolPlugin.metadata.InitScriptURL, initScriptUrl)
}
setMetadata(serverClientPlugin.metadata.UserAgent, 'WorkspaceService')
const initRepoDir = process.env.INIT_REPO_DIR ?? './init-scripts'
setMetadata(toolPlugin.metadata.InitRepoDir, initRepoDir)
setMetadata(serverClientPlugin.metadata.UserAgent, 'WorkspaceService')
setMetadata(serverNotification.metadata.InboxOnlyNotifications, true)
let canceled = false