diff --git a/.gitignore b/.gitignore index 739d78c64a..5e1d668b8d 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ lib/ _api-extractor-temp/ temp/ .idea +pods/workspace/init/ # Logs *.log diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml index 80113372fb..9ae17c5073 100644 --- a/common/config/rush/pnpm-lock.yaml +++ b/common/config/rush/pnpm-lock.yaml @@ -4078,7 +4078,7 @@ packages: version: 0.0.0 '@rush-temp/importer@file:projects/importer.tgz': - resolution: {integrity: sha512-nd4QEoFM7LFj37X/9PCtKl2HTaQl3xnpCbJL+FBuYPJhimHzG4KTvb3E5vZ31OZxgAzYBBLZb1KsswqqlXAJ9A==, tarball: file:projects/importer.tgz} + resolution: {integrity: sha512-jApdKeT5h/FEtXviuENXrD2pT5vphLHvE/7b8wt93xNAgwj61aVtsyT+CfaQCnzPSFYBvx0ECq1MNTwtvVzB0Q==, tarball: file:projects/importer.tgz} version: 0.0.0 '@rush-temp/inventory-assets@file:projects/inventory-assets.tgz': @@ -18574,6 +18574,7 @@ snapshots: '@types/js-yaml': 4.0.9 '@types/mime-types': 2.1.4 '@types/node': 20.11.19 + '@types/uuid': 8.3.4 '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.3.3))(eslint@8.56.0)(typescript@5.6.2) '@typescript-eslint/parser': 6.21.0(eslint@8.56.0)(typescript@5.6.2) commander: 8.3.0 @@ -18590,6 +18591,7 @@ snapshots: prettier: 3.2.5 ts-jest: 29.1.2(@babel/core@7.23.9)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.23.9))(esbuild@0.24.2)(jest@29.7.0(@types/node@20.11.19)(ts-node@10.9.2(@types/node@20.11.19)(typescript@5.3.3)))(typescript@5.6.2) typescript: 5.6.2 + uuid: 8.3.2 transitivePeerDependencies: - '@babel/core' - '@jest/types' diff --git a/dev/import-tool/docs/huly/example-workspace/Difficulty.yaml b/dev/import-tool/docs/huly/example-workspace/Difficulty.yaml new file mode 100644 index 0000000000..b025431660 --- /dev/null +++ b/dev/import-tool/docs/huly/example-workspace/Difficulty.yaml @@ -0,0 +1,9 @@ +class: core:class:Enum +title: Difficulty +values: + - Easy + - Medium + - Hard + - Expert + - Impossible + \ No newline at end of file diff --git a/dev/import-tool/docs/huly/example-workspace/RecipeAssociations.yaml b/dev/import-tool/docs/huly/example-workspace/RecipeAssociations.yaml new file mode 100644 index 0000000000..3b82b3e0e8 --- /dev/null +++ b/dev/import-tool/docs/huly/example-workspace/RecipeAssociations.yaml @@ -0,0 +1,7 @@ +# RecipeRelations.yaml +class: core:class:Association +typeA: "./Recipes.yaml" +typeB: "./Recipes.yaml" +nameA: recommendedDesserts +nameB: recommendedMainDishes +type: "N:N" # 1:1, 1:N, N:N \ No newline at end of file diff --git a/dev/import-tool/docs/huly/example-workspace/Recipes.yaml b/dev/import-tool/docs/huly/example-workspace/Recipes.yaml new file mode 100644 index 0000000000..c57b0ca48f --- /dev/null +++ b/dev/import-tool/docs/huly/example-workspace/Recipes.yaml @@ -0,0 +1,19 @@ +class: card:class:MasterTag +title: Recipe +properties: + - label: cookingTime + type: TypeString + - label: servings + type: TypeNumber + - label: difficulty + enumOf: "./Difficulty.yaml" + # isArray: true # for multiple values + - label: category + type: TypeString + - label: calories + type: TypeNumber + - label: chef + type: TypeString + - label: relatedRecipes + refTo: "./Recipes.yaml" + isArray: true \ No newline at end of file diff --git a/dev/import-tool/docs/huly/example-workspace/Recipes/Chocolate Lava Cake.md b/dev/import-tool/docs/huly/example-workspace/Recipes/Chocolate Lava Cake.md new file mode 100644 index 0000000000..1208904b0e --- /dev/null +++ 
b/dev/import-tool/docs/huly/example-workspace/Recipes/Chocolate Lava Cake.md @@ -0,0 +1,40 @@ +--- +title: Chocolate Lava Cake +cookingTime: 25 minutes +servings: 4 +difficulty: Medium +category: Dessert +calories: 450 +chef: Anna Smith +blobs: + - ./files/cake.png +recommendedMainDishes: + - ./Classic Margherita Pizza.md + - ./Vegan/Mushroom Risotto.md +--- + +# Chocolate Lava Cake + +## Ingredients +- 200g dark chocolate (70% cocoa) +- 200g butter +- 4 eggs +- 200g sugar +- 120g flour +- 1 tsp vanilla extract +- Pinch of salt +- Butter for ramekins +- Cocoa powder for dusting + +## Instructions +1. Melt chocolate and butter together +2. Whisk eggs and sugar until pale +3. Fold in chocolate mixture +4. Add flour and vanilla +5. Pour into buttered ramekins +6. Bake at 200°C (400°F) for 12 minutes + +## Notes +- Serve immediately while warm +- Can be prepared ahead and refrigerated +- Perfect with vanilla ice cream diff --git a/dev/import-tool/docs/huly/example-workspace/Recipes/Chocolate Lava Cake/Chocolate Sauce.md b/dev/import-tool/docs/huly/example-workspace/Recipes/Chocolate Lava Cake/Chocolate Sauce.md new file mode 100644 index 0000000000..7c42f5cbde --- /dev/null +++ b/dev/import-tool/docs/huly/example-workspace/Recipes/Chocolate Lava Cake/Chocolate Sauce.md @@ -0,0 +1,39 @@ +--- +title: Rich Chocolate Sauce +tags: + - ../DietaryType.yaml +cookingTime: 10 minutes +servings: 4 +difficulty: Easy +category: Dessert Components +calories: 200 +chef: Maria Green +restrictions: Vegetarian +allergens: Dairy +relatedRecipes: + - '../Chocolate Lava Cake.md' +--- + +# Rich Chocolate Sauce for Lava Cake + +## Ingredients +- 100g dark chocolate (70% cocoa) +- 100ml heavy cream +- 30g unsalted butter +- 1 tsp vanilla extract +- Pinch of sea salt + +## Instructions +1. Chop chocolate into small pieces +2. Heat cream until just simmering +3. Pour hot cream over chocolate +4. Let stand for 1 minute +5. Stir until smooth +6. Add butter and vanilla +7. Mix until glossy + +## Notes +- Use high-quality chocolate for best results +- Can be made ahead and reheated +- Store in refrigerator for up to 3 days +- Warm slightly before serving diff --git a/dev/import-tool/docs/huly/example-workspace/Recipes/Classic Margherita Pizza.md b/dev/import-tool/docs/huly/example-workspace/Recipes/Classic Margherita Pizza.md new file mode 100644 index 0000000000..81e72e7e09 --- /dev/null +++ b/dev/import-tool/docs/huly/example-workspace/Recipes/Classic Margherita Pizza.md @@ -0,0 +1,63 @@ +--- +title: Classic Margherita Pizza +tags: + - ./DietaryType.yaml +cookingTime: 30 minutes +servings: 4 +difficulty: Medium +category: Italian +calories: 850 +chef: Mario Rossi +restrictions: Vegetarian +allergens: Gluten, Dairy +recommendedDesserts: + - ./Chocolate Lava Cake.md + +--- + +# Classic Margherita Pizza + +## Ingredients +- 2 1/2 cups (300g) all-purpose flour +- 1 tsp salt +- 1 tsp active dry yeast +- 1 cup warm water +- 2 tbsp olive oil +- 1 cup tomato sauce +- 2 cups mozzarella cheese +- Fresh basil leaves +- Extra virgin olive oil + +## Instructions +1. Mix flour, salt, and yeast in a large bowl +2. Add warm water and olive oil, knead for 10 minutes +3. Let rise for 1 hour +4. Roll out dough and add toppings +5. 
Bake at 450°F (230°C) for 15-20 minutes + +## Notes +- For best results, use San Marzano tomatoes for the sauce +- Fresh mozzarella is preferred over pre-shredded +- Add basil leaves after baking + +# Classic Margherita Pizza + +## Ingredients +- Pizza dough +- San Marzano tomatoes +- Fresh mozzarella +- Fresh basil +- Extra virgin olive oil +- Salt + +## Instructions +1. Preheat oven to 450°F (230°C) +2. Roll out the pizza dough +3. Add tomato sauce +4. Add fresh mozzarella +5. Bake for 12-15 minutes +6. Add fresh basil and olive oil + +## Notes +- Best served immediately +- Use high-quality ingredients for authentic taste \ No newline at end of file diff --git a/dev/import-tool/docs/huly/example-workspace/Recipes/DietaryType.yaml b/dev/import-tool/docs/huly/example-workspace/Recipes/DietaryType.yaml new file mode 100644 index 0000000000..0c98fe40e8 --- /dev/null +++ b/dev/import-tool/docs/huly/example-workspace/Recipes/DietaryType.yaml @@ -0,0 +1,7 @@ +class: card:class:Tag +title: DietaryType +properties: + - label: restrictions + type: TypeString + - label: allergens + type: TypeString \ No newline at end of file diff --git a/dev/import-tool/docs/huly/example-workspace/Recipes/Vegan/Mushroom Risotto.md b/dev/import-tool/docs/huly/example-workspace/Recipes/Vegan/Mushroom Risotto.md new file mode 100644 index 0000000000..06e62f0310 --- /dev/null +++ b/dev/import-tool/docs/huly/example-workspace/Recipes/Vegan/Mushroom Risotto.md @@ -0,0 +1,43 @@ +--- +title: Vegan Mushroom Risotto +cookingTime: 45 minutes +servings: 4 +difficulty: Medium +category: Italian +calories: 380 +chef: Maria Green +proteinSource: Mushrooms +isGlutenFree: true +allergens: None +recommendedDesserts: + - ./Chocolate Lava Cake.md + +--- + +# Vegan Mushroom Risotto + +## Ingredients +- 300g Arborio rice +- 500g mixed mushrooms +- 1 onion, finely chopped +- 2 cloves garlic, minced +- 1 cup white wine +- 6 cups vegetable stock +- 2 tbsp nutritional yeast +- 2 tbsp olive oil +- Salt and pepper to taste +- Fresh parsley + +## Instructions +1. Sauté mushrooms until golden +2. Add onion and garlic, cook until soft +3. Add rice and toast for 2 minutes +4. Gradually add wine and stock +5. Cook until rice is creamy +6. 
Finish with nutritional yeast + +## Notes +- Use a variety of mushrooms for better flavor +- Keep stock warm while adding +- Stir constantly for creamy texture +- Nutritional yeast adds cheesy flavor \ No newline at end of file diff --git a/dev/import-tool/docs/huly/example-workspace/Recipes/Vegan/Vegan Recipe.yaml b/dev/import-tool/docs/huly/example-workspace/Recipes/Vegan/Vegan Recipe.yaml new file mode 100644 index 0000000000..a3e275d172 --- /dev/null +++ b/dev/import-tool/docs/huly/example-workspace/Recipes/Vegan/Vegan Recipe.yaml @@ -0,0 +1,9 @@ +class: card:class:MasterTag +title: Vegan Recipe +properties: + - label: proteinSource + type: TypeString + - label: isGlutenFree + type: TypeBoolean + - label: allergens + type: TypeString \ No newline at end of file diff --git a/dev/import-tool/docs/huly/example-workspace/Recipes/files/cake.png b/dev/import-tool/docs/huly/example-workspace/Recipes/files/cake.png new file mode 100644 index 0000000000..589aa737e8 Binary files /dev/null and b/dev/import-tool/docs/huly/example-workspace/Recipes/files/cake.png differ diff --git a/packages/importer/package.json b/packages/importer/package.json index 9ac60f7e2e..f1cf9c0813 100644 --- a/packages/importer/package.json +++ b/packages/importer/package.json @@ -38,10 +38,12 @@ "@types/node": "~20.11.16", "@types/mime-types": "~2.1.1", "@types/csvtojson": "^2.0.0", - "@types/js-yaml": "^4.0.9" + "@types/js-yaml": "^4.0.9", + "@types/uuid": "^8.3.1" }, "dependencies": { "@hcengineering/attachment": "^0.6.14", + "@hcengineering/card": "^0.6.0", "@hcengineering/chunter": "^0.6.20", "@hcengineering/collaboration": "^0.6.0", "@hcengineering/contact": "^0.6.24", @@ -61,7 +63,8 @@ "mime-types": "~2.1.34", "csvtojson": "^2.0.10", "js-yaml": "^4.1.0", - "image-size": "^1.1.1" + "image-size": "^1.1.1", + "uuid": "^8.3.2" }, "repository": "https://github.com/hcengineering/platform", "publishConfig": { diff --git a/packages/importer/src/huly/cards.ts b/packages/importer/src/huly/cards.ts new file mode 100644 index 0000000000..6b9831ad9f --- /dev/null +++ b/packages/importer/src/huly/cards.ts @@ -0,0 +1,736 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +import { Attachment } from '@hcengineering/attachment' +import card, { Card, CardSpace, MasterTag, Tag } from '@hcengineering/card' +import core, { + Association, + Attribute, + BlobType, + Class, + Doc, + Enum, + generateId, + Ref, + Relation, + Space +} from '@hcengineering/core' +import * as fs from 'fs' +import * as yaml from 'js-yaml' +import { contentType } from 'mime-types' +import * as path from 'path' +import { IntlString } from '../../../platform/types' +import { Props, UnifiedDoc, UnifiedUpdate, UnifiedFile, UnifiedMixin } from '../types' +import { MetadataRegistry, RelationMetadata } from './metadata' +import { readMarkdownContent, readYamlHeader } from './parsing' + +export interface UnifiedDocProcessResult { + docs: Map>> + mixins: Map>> + updates: Map>> + files: Map +} + +export class CardsProcessor { + constructor (private readonly metadataRegistry: MetadataRegistry) {} + + async processDirectory (directoryPath: string): Promise { + console.log('Start looking for cards stuff in:', directoryPath) + + const result: UnifiedDocProcessResult = { + docs: new Map(), + mixins: new Map(), + updates: new Map(), + files: new Map() + } + + await this.processSystemTypes(directoryPath, result) + + const topLevelTypes = new Array>() + await this.processMetadata(directoryPath, result, topLevelTypes) + + const typesRefs = topLevelTypes.map((type) => type.props._id) as Ref[] + const updateDefaultSpace: UnifiedUpdate = { + _class: card.class.CardSpace, + _id: 'card:space:Default' as Ref, + space: core.space.Model, + props: { + $push: { + types: { + $each: [...new Set(typesRefs)], + $position: 0 + } + } + } + } + result.updates.set('card:space:Default', [updateDefaultSpace]) + + await this.processSystemTypeCards(directoryPath, result, new Map(), new Map()) + await this.processCards(directoryPath, result, new Map(), new Map()) + + return result + } + + private async processSystemTypes (currentPath: string, result: UnifiedDocProcessResult): Promise { + const folders = fs + .readdirSync(currentPath, { withFileTypes: true }) + .filter((entry) => entry.isDirectory()) + .filter((folder) => folder.name === card.types.File || folder.name === card.types.Document) + + for (const folder of folders) { + const folderPath = path.join(currentPath, folder.name) + await this.processMetadata(folderPath, result, [], folder.name as Ref) + } + } + + private async processMetadata ( + currentPath: string, + result: UnifiedDocProcessResult, + types: Array>, + parentMasterTagId?: Ref + ): Promise { + const yamlFiles = fs + .readdirSync(currentPath, { withFileTypes: true }) + .filter((entry) => entry.isFile() && entry.name.endsWith('.yaml')) + + for (const entry of yamlFiles) { + const yamlPath = path.resolve(currentPath, entry.name) + console.log('Reading yaml file:', yamlPath) + const yamlConfig = yaml.load(fs.readFileSync(yamlPath, 'utf8')) as Record + + switch (yamlConfig?.class) { + case card.class.MasterTag: { + const masterTagId = this.metadataRegistry.getRef(yamlPath) as Ref + const masterTag = await this.createMasterTag(yamlConfig, masterTagId, parentMasterTagId) + const masterTagAttrs = await this.createAttributes(yamlPath, yamlConfig, masterTagId) + + this.metadataRegistry.setAttributes(yamlPath, masterTagAttrs) + result.docs.set(yamlPath, [masterTag, ...Array.from(masterTagAttrs.values())]) + types.push(masterTag) + + const masterTagDir = path.join(currentPath, path.basename(yamlPath, '.yaml')) + if (fs.existsSync(masterTagDir) && fs.statSync(masterTagDir).isDirectory()) { + await 
this.processMetadata(masterTagDir, result, [], masterTagId) + } + break + } + case card.class.Tag: { + if (parentMasterTagId === undefined) { + throw new Error('Tag should be inside master tag folder: ' + currentPath) + } + await this.processTag(yamlPath, yamlConfig, result, parentMasterTagId) + break + } + case core.class.Association: { + const association = await this.createAssociation(yamlPath, yamlConfig) + result.docs.set(yamlPath, [association]) + break + } + case core.class.Enum: { + const enumDoc = await this.createEnum(yamlPath, yamlConfig) + result.docs.set(yamlPath, [enumDoc]) + break + } + default: + console.log('Skipping class: ' + yamlConfig?.class) + } + } + } + + private async processCards ( + currentPath: string, + result: UnifiedDocProcessResult, + masterTagRelations: Map, + masterTagAttrs: Map>>, + masterTagId?: Ref + ): Promise { + const entries = fs.readdirSync(currentPath, { withFileTypes: true }) + + // Check if there is a YAML file for the current directory + const yamlPath = currentPath + '.yaml' + if (fs.existsSync(yamlPath)) { + const yamlConfig = yaml.load(fs.readFileSync(yamlPath, 'utf8')) as Record + if (yamlConfig?.class === card.class.MasterTag) { + masterTagId = this.metadataRegistry.getRef(yamlPath) as Ref + this.metadataRegistry.getAssociations(yamlPath).forEach((relationMetadata, propName) => { + masterTagRelations.set(propName, relationMetadata) + }) + this.metadataRegistry.getAttributes(yamlPath).forEach((attr, propName) => { + masterTagAttrs.set(propName, attr) + }) + } + } + + // Process MD files with the current MasterTag + for (const entry of entries) { + if (entry.isFile() && entry.name.endsWith('.md')) { + const cardPath = path.join(currentPath, entry.name) + const { class: cardType, ...cardProps } = await readYamlHeader(cardPath) + + if (masterTagId !== undefined) { + await this.processCard(result, cardPath, cardProps, masterTagId, masterTagRelations, masterTagAttrs) + } + } + } + + // Process subdirectories that have corresponding YAML files + for (const entry of entries) { + if (!entry.isDirectory()) continue + const dirPath = path.join(currentPath, entry.name) + const dirYamlPath = dirPath + '.yaml' + + // Only process directories that have a corresponding YAML file + if (fs.existsSync(dirYamlPath)) { + await this.processCards(dirPath, result, masterTagRelations, masterTagAttrs, masterTagId) + } + } + } + + private async processSystemTypeCards ( + currentDir: string, + result: UnifiedDocProcessResult, + masterTagRelations: Map, + masterTagAttrs: Map>> + ): Promise { + const entries = fs.readdirSync(currentDir, { withFileTypes: true }) + + for (const entry of entries) { + if (entry.isFile() && entry.name.endsWith('.md')) { + const cardPath = path.join(currentDir, entry.name) + const { class: cardType, ...cardProps } = await readYamlHeader(cardPath) + + if (cardType.startsWith('card:types:') === false) { + throw new Error('Unsupported card type: ' + cardType + ' in ' + cardPath) + } + + await this.processCard(result, cardPath, cardProps, cardType, masterTagRelations, masterTagAttrs) + } else if (entry.isDirectory() && (entry.name === card.types.File || entry.name === card.types.Document)) { + await this.processCards(path.join(currentDir, entry.name), result, masterTagRelations, masterTagAttrs) + } + } + } + + private async processCard ( + result: UnifiedDocProcessResult, + cardPath: string, + cardProps: Record, + masterTagId: Ref, + masterTagRelations: Map, + masterTagAttrs: Map>>, + parentCardId?: Ref + ): Promise { + console.log('Processing 
card:', cardPath) + + if (cardProps.blobs !== undefined) { + await this.createBlobs(cardProps.blobs, cardPath, result) + } + + const cardWithRelations = await this.createCardWithRelations( + cardProps, + cardPath, + masterTagId, + masterTagRelations, + masterTagAttrs, + result.files, + parentCardId + ) + + if (cardWithRelations.length > 0) { + const docs = result.docs.get(cardPath) ?? [] + docs.push(...cardWithRelations) + result.docs.set(cardPath, docs) + + const card = cardWithRelations[0] as UnifiedDoc + this.metadataRegistry.setRefMetadata(cardPath, card._class, card.props.title) + await this.applyTags(card, cardProps, cardPath, result) + + if (cardProps.attachments !== undefined) { + await this.createAttachments(cardProps.attachments, cardPath, card, result) + } + + const cardDir = path.join(path.dirname(cardPath), path.basename(cardPath, '.md')) + if (fs.existsSync(cardDir) && fs.statSync(cardDir).isDirectory()) { + await this.processCardDirectory( + result, + cardDir, + masterTagId, + masterTagRelations, + masterTagAttrs, + card.props._id as Ref + ) + } + } + } + + private async processCardDirectory ( + result: UnifiedDocProcessResult, + cardDir: string, + masterTagId: Ref, + masterTagRelations: Map, + masterTagAttrs: Map>>, + parentCardId?: Ref + ): Promise { + const entries = fs + .readdirSync(cardDir, { withFileTypes: true }) + .filter((entry) => entry.isFile() && entry.name.endsWith('.md')) + + for (const entry of entries) { + const childCardPath = path.join(cardDir, entry.name) + const { class: cardClass, ...cardProps } = await readYamlHeader(childCardPath) + await this.processCard( + result, + childCardPath, + cardProps, + masterTagId, + masterTagRelations, + masterTagAttrs, + parentCardId + ) + } + } + + private async createMasterTag ( + data: Record, + masterTagId: Ref, + parentMasterTagId?: Ref + ): Promise> { + const { class: _class, title } = data + if (_class !== card.class.MasterTag) { + throw new Error('Invalid master tag data') + } + + return { + _class: card.class.MasterTag, + props: { + _id: masterTagId, + space: core.space.Model, + extends: parentMasterTagId ?? card.class.Card, + label: ('embedded:embedded:' + title) as IntlString, + kind: 0, + icon: card.icon.MasterTag + } + } + } + + private async processTag ( + tagPath: string, + tagConfig: Record, + result: UnifiedDocProcessResult, + masterTagId: Ref, + parentTagId?: Ref + ): Promise { + const tagId = this.metadataRegistry.getRef(tagPath) as Ref + const tag = await this.createTag(tagConfig, tagId, masterTagId, parentTagId) + + const attributes = await this.createAttributes(tagPath, tagConfig, tagId) + this.metadataRegistry.setAttributes(tagPath, attributes) + + const docs = result.docs.get(tagPath) ?? 
[] + docs.push(tag, ...Array.from(attributes.values())) + result.docs.set(tagPath, docs) + + // Process child tags + const tagDir = path.join(path.dirname(tagPath), path.basename(tagPath, '.yaml')) + if (fs.existsSync(tagDir) && fs.statSync(tagDir).isDirectory()) { + await this.processTagDirectory(tagDir, result, masterTagId, tagId) + } + } + + private async processTagDirectory ( + tagDir: string, + result: UnifiedDocProcessResult, + parentMasterTagId: Ref, + parentTagId: Ref + ): Promise { + const entries = fs.readdirSync(tagDir, { withFileTypes: true }) + + for (const entry of entries) { + if (!entry.isFile() || !entry.name.endsWith('.yaml')) continue + const childTagPath = path.join(tagDir, entry.name) + const childTagConfig = yaml.load(fs.readFileSync(childTagPath, 'utf8')) as Record + + if (childTagConfig?.class === card.class.Tag) { + await this.processTag(childTagPath, childTagConfig, result, parentMasterTagId, parentTagId) + } + } + } + + private async createTag ( + data: Record, + tagId: Ref, + masterTagId: Ref, + parentTagId?: Ref + ): Promise> { + const { class: _class, title } = data + if (_class !== card.class.Tag) { + throw new Error('Invalid tag data') + } + + return { + _class: card.class.Tag, + props: { + _id: tagId, + space: core.space.Model, + extends: parentTagId ?? masterTagId, + label: ('embedded:embedded:' + title) as IntlString, + kind: 2, + icon: card.icon.Tag + } + } + } + + private async createAttributes ( + currentPath: string, + data: Record, + masterTagId: Ref + ): Promise>>> { + if (data.properties === undefined) { + return new Map() + } + + const attributesByLabel = new Map>>() + for (const property of data.properties) { + const type = await this.convertPropertyType(property, currentPath) + + const attr: UnifiedDoc> = { + _class: core.class.Attribute, + props: { + space: core.space.Model, + attributeOf: masterTagId, + name: generateId>(), + label: ('embedded:embedded:' + property.label) as IntlString, + isCustom: true, + type, + defaultValue: property.defaultValue ?? null + } + } + attributesByLabel.set(property.label, attr) + } + return attributesByLabel + } + + private async convertPropertyType (property: Record, currentPath: string): Promise> { + let type: Record = {} + if (property.refTo !== undefined) { + const baseType: Record = {} + baseType._class = core.class.RefTo + const refPath = path.resolve(path.dirname(currentPath), property.refTo) + baseType.to = this.metadataRegistry.getRef(refPath) + baseType.label = core.string.Ref + type = + property.isArray === true + ? { + _class: core.class.ArrOf, + label: core.string.Array, + of: baseType + } + : baseType + } else if (property.enumOf !== undefined) { + const baseType: Record = {} + baseType._class = core.class.EnumOf + const enumPath = path.resolve(path.dirname(currentPath), property.enumOf) + baseType.of = this.metadataRegistry.getRef(enumPath) + baseType.label = 'core:string:Enum' + type = + property.isArray === true + ? 
{ + _class: core.class.ArrOf, + label: core.string.Array, + of: baseType + } + : baseType + } else { + switch (property.type) { + case 'TypeString': + type._class = core.class.TypeString + type.label = core.string.String + break + case 'TypeNumber': + type._class = core.class.TypeNumber + type.label = core.string.Number + break + case 'TypeBoolean': + type._class = core.class.TypeBoolean + type.label = core.string.Boolean + break + default: + throw new Error('Unsupported type: ' + property.type + ' ' + currentPath) + } + } + return type + } + + private async createCardWithRelations ( + cardHeader: Record, + cardPath: string, + masterTagId: Ref, + masterTagRelations: Map, + masterTagAttrs: Map>>, + blobFiles: Map, + parentCardId?: Ref + ): Promise[]> { + const { _class, title, blobs: rawBlobs, tags: rawTags, ...customProperties } = cardHeader + const tags = rawTags !== undefined ? (Array.isArray(rawTags) ? rawTags : [rawTags]) : [] + const blobs = rawBlobs !== undefined ? (Array.isArray(rawBlobs) ? rawBlobs : [rawBlobs]) : [] + + const cardId = this.metadataRegistry.getRef(cardPath) as Ref + const cardProps: Record = { + _id: cardId, + space: 'card:space:Default' as Ref, + title, + parent: parentCardId + } + + if (blobs.length > 0) { + const blobProps: Record = {} + for (const blob of blobs) { + const blobPath = path.resolve(path.dirname(cardPath), blob) + const blobFile = blobFiles.get(blobPath) + if (blobFile === undefined) { + throw new Error('Blob file not found: ' + blobPath + ' from:' + cardPath) + } + blobProps[blobFile._id] = { + file: blobFile._id, + type: blobFile.type, + name: blobFile.name, + metadata: {} // todo: blobFile.metadata + } + } + cardProps.blobs = blobProps + } + + const tagAssociations = new Map() + for (const tag of tags) { + const tagPath = path.resolve(path.dirname(cardPath), tag) + this.metadataRegistry.getAssociations(tagPath).forEach((relationMetadata, propName) => { + tagAssociations.set(propName, relationMetadata) + }) + } + + const relations: UnifiedDoc[] = [] + for (const [key, value] of Object.entries(customProperties)) { + if (masterTagAttrs.has(key)) { + const attr = masterTagAttrs.get(key) + if (attr === undefined) { + throw new Error(`Attribute not found: ${key}, ${cardPath}`) + } + + const attrProps = attr.props + + const attrType = attrProps.type + const attrBaseType = attrType._class === core.class.ArrOf ? attrType.of : attrType + const values = attrType._class === core.class.ArrOf ? value : [value] + const propValues = [] + for (const val of values) { + if (attrBaseType._class === core.class.RefTo) { + const refPath = path.resolve(path.dirname(cardPath), val) + const ref = this.metadataRegistry.getRef(refPath) as Ref + propValues.push(ref) + } else { + propValues.push(val) + } + } + cardProps[attrProps.name] = attrType._class === core.class.ArrOf ? propValues : propValues[0] + } else if (masterTagRelations.has(key) || tagAssociations.has(key)) { + const metadata = masterTagRelations.get(key) ?? tagAssociations.get(key) + if (metadata === undefined) { + throw new Error(`Association not found: ${key}, ${cardPath}`) + } + const values = Array.isArray(value) ? 
value : [value] + for (const val of values) { + const otherCardPath = path.resolve(path.dirname(cardPath), val) + const otherCardId = this.metadataRegistry.getRef(otherCardPath) as Ref + const relation: UnifiedDoc = this.createRelation(metadata, cardId, otherCardId) + relations.push(relation) + } + } + } + + return [ + { + _class: masterTagId, + collabField: 'content', + contentProvider: () => readMarkdownContent(cardPath), + props: cardProps as Props + }, + ...relations + ] + } + + private createRelation (metadata: RelationMetadata, cardId: Ref, otherCardId: Ref): UnifiedDoc { + const otherCardField = metadata.field === 'docA' ? 'docB' : 'docA' + const relation: UnifiedDoc = { + _class: core.class.Relation, + props: { + _id: generateId(), + space: core.space.Model, + [metadata.field]: cardId, + [otherCardField]: otherCardId, + association: metadata.association + } as unknown as Props + } + return relation + } + + private async applyTags ( + card: UnifiedDoc, + cardHeader: Record, + cardPath: string, + result: UnifiedDocProcessResult + ): Promise { + const tags = + cardHeader.tags !== undefined ? (Array.isArray(cardHeader.tags) ? cardHeader.tags : [cardHeader.tags]) : [] + if (tags.length === 0) return + + const mixins: UnifiedMixin[] = [] + for (const tagPath of tags) { + const cardDir = path.dirname(cardPath) + const tagAbsPath = path.resolve(cardDir, tagPath) + const tagId = this.metadataRegistry.getRef(tagAbsPath) as Ref + + const tagProps: Record = {} + this.metadataRegistry.getAttributes(tagAbsPath).forEach((attr, label) => { + tagProps[attr.props.name] = cardHeader[label] + }) + + const mixin: UnifiedMixin = { + _class: card._class, + mixin: tagId, + props: { + _id: card.props._id as Ref, + space: core.space.Workspace, + __mixin: 'true', + ...tagProps + } as unknown as Props + } + mixins.push(mixin) + } + + if (mixins.length > 0) { + result.mixins.set(cardPath, mixins) + } + } + + private async createAttachments ( + attachments: string[], + cardPath: string, + card: UnifiedDoc, + result: UnifiedDocProcessResult + ): Promise { + for (const attachment of attachments) { + const attachmentPath = path.resolve(path.dirname(cardPath), attachment) + const file = await this.createFile(attachmentPath) + result.files.set(attachmentPath, file) + + const attachmentId = this.metadataRegistry.getRef(attachmentPath) as Ref + const attachmentDoc: UnifiedDoc = { + _class: 'attachment:class:Attachment' as Ref>, + props: { + _id: attachmentId, + space: core.space.Workspace, + attachedTo: card.props._id as Ref, + attachedToClass: card._class, + file: file._id, + name: file.name, + collection: 'attachments', + lastModified: Date.now(), + type: file.type, + size: file.size, + metadata: {} // todo: file.metadata for images + } + } + result.docs.set(attachmentPath, [attachmentDoc]) + } + } + + private async createBlobs (blobs: string[], cardPath: string, result: UnifiedDocProcessResult): Promise { + for (const blob of blobs) { + const blobPath = path.resolve(path.dirname(cardPath), blob) + const file = await this.createFile(blobPath) + result.files.set(blobPath, file) + } + } + + private async createFile (fileAbsPath: string): Promise { + const fileName = path.basename(fileAbsPath) + const fileUuid = this.metadataRegistry.getBlobUuid(fileAbsPath) + const type = contentType(fileName) + const size = fs.statSync(fileAbsPath).size + + const file: UnifiedFile = { + _id: fileUuid, // id for datastore + name: fileName, + type: type !== false ? 
type : 'application/octet-stream', + size, + blobProvider: async () => { + const data = fs.readFileSync(fileAbsPath) + const props = type !== false ? { type } : undefined + return new Blob([data], props) + } + } + return file + } + + private async createAssociation (yamlPath: string, yamlConfig: Record): Promise> { + const { class: _class, typeA, typeB, type, nameA, nameB } = yamlConfig + + const currentPath = path.dirname(yamlPath) + const associationId = this.metadataRegistry.getRef(yamlPath) as Ref + + const typeAPath = path.resolve(currentPath, typeA) + this.metadataRegistry.addAssociation(typeAPath, nameB, { + association: associationId, + field: 'docA', + type + }) + + const typeBPath = path.resolve(currentPath, typeB) + this.metadataRegistry.addAssociation(typeBPath, nameA, { + association: associationId, + field: 'docB', + type + }) + + const typeAId = this.metadataRegistry.getRef(typeAPath) as Ref + const typeBId = this.metadataRegistry.getRef(typeBPath) as Ref + + return { + _class, + props: { + _id: associationId, + space: core.space.Model, + classA: typeAId, + classB: typeBId, + nameA, + nameB, + type + } as unknown as Props + } + } + + private async createEnum (yamlPath: string, yamlConfig: Record): Promise> { + const { title, values } = yamlConfig + const enumId = this.metadataRegistry.getRef(yamlPath) as Ref + return { + _class: core.class.Enum, + props: { + _id: enumId, + space: core.space.Model, + name: title, + enumValues: values + } + } + } +} diff --git a/packages/importer/src/huly/huly.ts b/packages/importer/src/huly/huly.ts index 75875c9c94..a6f3aa3e13 100644 --- a/packages/importer/src/huly/huly.ts +++ b/packages/importer/src/huly/huly.ts @@ -14,9 +14,17 @@ // import { type Attachment } from '@hcengineering/attachment' +import card from '@hcengineering/card' import contact, { Employee, type Person, type PersonAccount } from '@hcengineering/contact' +import documents, { + ControlledDocument, + DocumentCategory, + DocumentMeta, + DocumentState +} from '@hcengineering/controlled-documents' import { type Class, type Doc, generateId, type Ref, type Space, type TxOperations } from '@hcengineering/core' import document, { type Document } from '@hcengineering/document' +import core from '@hcengineering/model-core' import { MarkupMarkType, type MarkupNode, MarkupNodeType, traverseNode, traverseNodeMarks } from '@hcengineering/text' import tracker, { type Issue, Project } from '@hcengineering/tracker' import * as fs from 'fs' @@ -33,23 +41,19 @@ import { type ImportDocument, ImportDrawing, type ImportIssue, + ImportOrgSpace, type ImportProject, type ImportProjectType, type ImportTeamspace, type ImportWorkspace, - WorkspaceImporter, - ImportOrgSpace + WorkspaceImporter } from '../importer/importer' import { type Logger } from '../importer/logger' import { BaseMarkdownPreprocessor } from '../importer/preprocessor' import { type FileUploader } from '../importer/uploader' -import documents, { - DocumentState, - DocumentCategory, - ControlledDocument, - DocumentMeta -} from '@hcengineering/controlled-documents' - +import { CardsProcessor } from './cards' +import { MetadataRegistry, ReferenceMetadata } from './metadata' +import { readMarkdownContent, readYamlHeader } from './parsing' export interface HulyComment { author: string text: string @@ -154,8 +158,7 @@ class HulyMarkdownPreprocessor extends BaseMarkdownPreprocessor { constructor ( private readonly urlProvider: (id: string) => string, private readonly logger: Logger, - private readonly pathById: Map, string>, - private 
readonly refMetaByPath: Map, + private readonly metadataRegistry: MetadataRegistry, private readonly attachMetaByPath: Map, personsByName: Map> ) { @@ -191,12 +194,12 @@ class HulyMarkdownPreprocessor extends BaseMarkdownPreprocessor { return } - const sourceMeta = this.refMetaByPath.get(sourcePath) - if (sourceMeta === undefined) { + if (!this.metadataRegistry.hasRefMetadata(sourcePath)) { this.logger.error(`Source metadata not found for ${sourcePath}`) return } + const sourceMeta = this.metadataRegistry.getRefMetadata(sourcePath) this.updateAttachmentMetadata(fullPath, attachmentMeta, id, spaceId, sourceMeta) this.alterImageNode(node, attachmentMeta.id, attachmentMeta.name) } @@ -208,20 +211,18 @@ class HulyMarkdownPreprocessor extends BaseMarkdownPreprocessor { const sourcePath = this.getSourcePath(id) if (sourcePath == null) return - const href = decodeURI(mark.attrs.href) + const href = decodeURI(mark.attrs?.href ?? '') const fullPath = path.resolve(path.dirname(sourcePath), href) - if (this.refMetaByPath.has(fullPath)) { - const targetDocMeta = this.refMetaByPath.get(fullPath) - if (targetDocMeta !== undefined) { - this.alterInternalLinkNode(node, targetDocMeta) - } + if (this.metadataRegistry.hasRefMetadata(fullPath)) { + const targetDocMeta = this.metadataRegistry.getRefMetadata(fullPath) + this.alterMentionNode(node, targetDocMeta) } else if (this.attachMetaByPath.has(fullPath)) { const attachmentMeta = this.attachMetaByPath.get(fullPath) if (attachmentMeta !== undefined) { this.alterAttachmentLinkNode(node, attachmentMeta) - const sourceMeta = this.refMetaByPath.get(sourcePath) - if (sourceMeta !== undefined) { + if (this.metadataRegistry.hasRefMetadata(sourcePath)) { + const sourceMeta = this.metadataRegistry.getRefMetadata(sourcePath) this.updateAttachmentMetadata(fullPath, attachmentMeta, id, spaceId, sourceMeta) } } @@ -250,7 +251,7 @@ class HulyMarkdownPreprocessor extends BaseMarkdownPreprocessor { } } - private alterInternalLinkNode (node: MarkupNode, targetMeta: ReferenceMetadata): void { + private alterMentionNode (node: MarkupNode, targetMeta: ReferenceMetadata): void { node.type = MarkupNodeType.reference node.attrs = { id: targetMeta.id, @@ -282,8 +283,8 @@ class HulyMarkdownPreprocessor extends BaseMarkdownPreprocessor { } private getSourcePath (id: Ref): string | null { - const sourcePath = this.pathById.get(id) - if (sourcePath == null) { + const sourcePath = this.metadataRegistry.getPath(id) + if (sourcePath === undefined) { this.logger.error(`Source file path not found for ${id}`) return null } @@ -306,12 +307,6 @@ class HulyMarkdownPreprocessor extends BaseMarkdownPreprocessor { } } -interface ReferenceMetadata { - id: Ref - class: string - refTitle: string -} - interface AttachmentMetadata { id: Ref name: string @@ -322,15 +317,15 @@ interface AttachmentMetadata { } export class HulyFormatImporter { - private readonly pathById = new Map, string>() - private readonly refMetaByPath = new Map() - private readonly fileMetaByPath = new Map() - private readonly ctrlDocTemplateIdByPath = new Map>() - private personsByName = new Map>() private accountsByEmail = new Map>() private employeesByName = new Map>() + private readonly fileMetaByPath = new Map() + private readonly metadataRegistry = new MetadataRegistry() + + private readonly cardsProcessor = new CardsProcessor(this.metadataRegistry) + constructor ( private readonly client: TxOperations, private readonly fileUploader: FileUploader, @@ -357,8 +352,7 @@ export class HulyFormatImporter { const preprocessor = new 
HulyMarkdownPreprocessor( this.fileUploader.getFileUrl, this.logger, - this.pathById, - this.refMetaByPath, + this.metadataRegistry, this.fileMetaByPath, this.personsByName ) @@ -505,6 +499,13 @@ export class HulyFormatImporter { break } + case core.class.Enum: + case core.class.Association: + case card.class.MasterTag: { + this.logger.log(`Skipping ${spaceName}: will be processed later`) + break + } + default: { throw new Error(`Unknown space class ${spaceConfig.class} in ${spaceName}`) } @@ -515,7 +516,16 @@ export class HulyFormatImporter { } } - return builder.build() + const { docs, mixins, updates, files } = await this.cardsProcessor.processDirectory(folderPath) + + const ws = builder.build() + ws.unifiedDocs = { + docs: Array.from(docs.values()).flat(), + mixins: Array.from(mixins.values()).flat(), + updates: Array.from(updates.values()).flat(), + files: Array.from(files.values()) + } + return ws } private async processIssuesRecursively ( @@ -529,7 +539,7 @@ export class HulyFormatImporter { for (const issueFile of issueFiles) { const issuePath = path.join(currentPath, issueFile) - const issueHeader = (await this.readYamlHeader(issuePath)) as HulyIssueHeader + const issueHeader = (await readYamlHeader(issuePath)) as HulyIssueHeader if (issueHeader.class === undefined) { this.logger.error(`Skipping ${issueFile}: not an issue`) @@ -540,20 +550,14 @@ export class HulyFormatImporter { const numberMatch = issueFile.match(/^(\d+)\./) const issueNumber = numberMatch?.[1] - const meta: ReferenceMetadata = { - id: generateId(), - class: tracker.class.Issue, - refTitle: `${projectIdentifier}-${issueNumber}` - } - this.pathById.set(meta.id, issuePath) - this.refMetaByPath.set(issuePath, meta) + this.metadataRegistry.setRefMetadata(issuePath, tracker.class.Issue, `${projectIdentifier}-${issueNumber}`) const issue: ImportIssue = { - id: meta.id as Ref, + id: this.metadataRegistry.getRef(issuePath) as Ref, class: tracker.class.Issue, title: issueHeader.title, number: parseInt(issueNumber ?? 
'NaN'), - descrProvider: async () => await this.readMarkdownContent(issuePath), + descrProvider: async () => await readMarkdownContent(issuePath), status: { name: issueHeader.status }, priority: issueHeader.priority, estimation: issueHeader.estimation, @@ -613,7 +617,7 @@ export class HulyFormatImporter { for (const docFile of docFiles) { const docPath = path.join(currentPath, docFile) - const docHeader = (await this.readYamlHeader(docPath)) as HulyDocumentHeader + const docHeader = (await readYamlHeader(docPath)) as HulyDocumentHeader if (docHeader.class === undefined) { this.logger.error(`Skipping ${docFile}: not a document`) @@ -621,20 +625,13 @@ export class HulyFormatImporter { } if (docHeader.class === document.class.Document) { - const docMeta: ReferenceMetadata = { - id: generateId(), - class: document.class.Document, - refTitle: docHeader.title - } - - this.pathById.set(docMeta.id, docPath) - this.refMetaByPath.set(docPath, docMeta) + this.metadataRegistry.setRefMetadata(docPath, document.class.Document, docHeader.title) const doc: ImportDocument = { - id: docMeta.id as Ref, + id: this.metadataRegistry.getRef(docPath) as Ref, class: document.class.Document, title: docHeader.title, - descrProvider: async () => await this.readMarkdownContent(docPath), + descrProvider: async () => await readMarkdownContent(docPath), subdocs: [] // Will be added via builder } @@ -661,9 +658,7 @@ export class HulyFormatImporter { for (const docFile of docFiles) { const docPath = path.join(currentPath, docFile) - const docHeader = (await this.readYamlHeader(docPath)) as - | HulyControlledDocumentHeader - | HulyDocumentTemplateHeader + const docHeader = (await readYamlHeader(docPath)) as HulyControlledDocumentHeader | HulyDocumentTemplateHeader if (docHeader.class === undefined) { this.logger.error(`Skipping ${docFile}: not a document`) @@ -677,41 +672,22 @@ export class HulyFormatImporter { throw new Error(`Unknown document class ${docHeader.class} in ${docFile}`) } - const documentMetaId = generateId() - const refMeta: ReferenceMetadata = { - id: documentMetaId, - class: documents.class.DocumentMeta, - refTitle: docHeader.title - } - this.refMetaByPath.set(docPath, refMeta) + const documentMetaId = this.metadataRegistry.getRef(docPath) as Ref + this.metadataRegistry.setRefMetadata(docPath, documents.class.DocumentMeta, docHeader.title) if (docHeader.class === documents.class.ControlledDocument) { - const docId = generateId() - this.pathById.set(docId, docPath) - const doc = await this.processControlledDocument( docHeader as HulyControlledDocumentHeader, docPath, - docId, + this.metadataRegistry.getRef(docPath) as Ref, documentMetaId ) builder.addControlledDocument(spacePath, docPath, doc, parentDocPath) } else { - if (!this.ctrlDocTemplateIdByPath.has(docPath)) { - const templateId = generateId() - this.ctrlDocTemplateIdByPath.set(docPath, templateId) - this.pathById.set(templateId, docPath) - } - - const templateId = this.ctrlDocTemplateIdByPath.get(docPath) - if (templateId === undefined) { - throw new Error(`Template ID not found: ${docPath}`) - } - const template = await this.processControlledDocumentTemplate( docHeader as HulyDocumentTemplateHeader, docPath, - templateId, + this.metadataRegistry.getRef(docPath) as Ref, documentMetaId ) builder.addControlledDocumentTemplate(spacePath, docPath, template, parentDocPath) @@ -835,16 +811,7 @@ export class HulyFormatImporter { throw new Error(`Template file not found: ${templatePath}`) } - if (!this.ctrlDocTemplateIdByPath.has(templatePath)) { - const 
templateId = generateId() - this.ctrlDocTemplateIdByPath.set(templatePath, templateId) - this.pathById.set(templateId, templatePath) - } - - const templateId = this.ctrlDocTemplateIdByPath.get(templatePath) - if (templateId === undefined) { - throw new Error(`Template ID not found: ${templatePath}`) - } + const templateId = this.metadataRegistry.getRef(templatePath) as Ref return { id, @@ -862,7 +829,7 @@ export class HulyFormatImporter { reviewers: header.reviewers?.map((email) => this.findEmployeeByName(email)) ?? [], approvers: header.approvers?.map((email) => this.findEmployeeByName(email)) ?? [], coAuthors: header.coAuthors?.map((email) => this.findEmployeeByName(email)) ?? [], - descrProvider: async () => await this.readMarkdownContent(docPath), + descrProvider: async () => await readMarkdownContent(docPath), ccReason: header.changeControl?.reason, ccImpact: header.changeControl?.impact, ccDescription: header.changeControl?.description, @@ -900,7 +867,7 @@ export class HulyFormatImporter { reviewers: header.reviewers?.map((email) => this.findEmployeeByName(email)) ?? [], approvers: header.approvers?.map((email) => this.findEmployeeByName(email)) ?? [], coAuthors: header.coAuthors?.map((email) => this.findEmployeeByName(email)) ?? [], - descrProvider: async () => await this.readMarkdownContent(docPath), + descrProvider: async () => await readMarkdownContent(docPath), ccReason: header.changeControl?.reason, ccImpact: header.changeControl?.impact, ccDescription: header.changeControl?.description, @@ -908,22 +875,6 @@ export class HulyFormatImporter { } } - private async readYamlHeader (filePath: string): Promise { - this.logger.log('Read YAML header from: ' + filePath) - const content = fs.readFileSync(filePath, 'utf8') - const match = content.match(/^---\n([\s\S]*?)\n---/) - if (match != null) { - return yaml.load(match[1]) - } - return {} - } - - private async readMarkdownContent (filePath: string): Promise { - const content = fs.readFileSync(filePath, 'utf8') - const match = content.match(/^---\n[\s\S]*?\n---\n(.*)$/s) - return match != null ? match[1] : content - } - private async cachePersonsByNames (): Promise { this.personsByName = (await this.client.findAll(contact.class.Person, {})) .map((person) => { diff --git a/packages/importer/src/huly/metadata.ts b/packages/importer/src/huly/metadata.ts new file mode 100644 index 0000000000..e101ee07dd --- /dev/null +++ b/packages/importer/src/huly/metadata.ts @@ -0,0 +1,121 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +import { Tag } from '@hcengineering/card' +import { Association, Attribute, Blob as PlatformBlob, Doc, generateId, Ref } from '@hcengineering/core' +import { UnifiedDoc } from '../types' +import { v4 as uuid } from 'uuid' + +export interface RelationMetadata { + association: Ref + field: 'docA' | 'docB' + type: '1:1' | '1:N' | 'N:N' +} +export type MapAttributeToUnifiedDoc = Map>> +export type MapNameToRelation = Map + +export interface TagMetadata { + _id: string + attributes: MapAttributeToUnifiedDoc + associations: MapNameToRelation +} + +export interface ReferenceMetadata { + id: Ref + class: string + refTitle: string +} + +export class MetadataRegistry { + private readonly pathToRef = new Map>() + private readonly refToPath = new Map, string>() + private readonly pathToBlobUuid = new Map>() + private readonly pathToTagMetadata = new Map() + private readonly pathToRefMetadata = new Map() + + public getRef (path: string): Ref { + let ref = this.pathToRef.get(path) + if (ref === undefined) { + ref = generateId() + this.pathToRef.set(path, ref) + this.refToPath.set(ref, path) + } + return ref + } + + public getPath (ref: Ref): string | undefined { + return this.refToPath.get(ref) + } + + public getBlobUuid (path: string): Ref { + let blobUuid = this.pathToBlobUuid.get(path) + if (blobUuid === undefined) { + blobUuid = uuid() as Ref + this.pathToBlobUuid.set(path, blobUuid) + } + return blobUuid + } + + public getAttributes (path: string): MapAttributeToUnifiedDoc { + return this.pathToTagMetadata.get(path)?.attributes ?? new Map() + } + + public getAssociations (path: string): MapNameToRelation { + return this.pathToTagMetadata.get(path)?.associations ?? new Map() + } + + public setAttributes (path: string, attributes: MapAttributeToUnifiedDoc): void { + const metadata = this.pathToTagMetadata.get(path) ?? { + _id: this.getRef(path), + attributes: new Map(), + associations: new Map() + } + metadata.attributes = attributes + this.pathToTagMetadata.set(path, metadata) + } + + public addAssociation (tagPath: string, propName: string, relationMetadata: RelationMetadata): void { + const metadata = this.pathToTagMetadata.get(tagPath) ?? { + _id: this.getRef(tagPath), + attributes: new Map(), + associations: new Map() + } + metadata.associations.set(propName, relationMetadata) + this.pathToTagMetadata.set(tagPath, metadata) + } + + public setRefMetadata (path: string, _class: string, title: string): void { + const ref = this.getRef(path) + this.pathToRefMetadata.set(path, { + id: ref, + class: _class, + refTitle: title + }) + } + + public hasRefMetadata (path: string): boolean { + return this.pathToRefMetadata.has(path) + } + + public getRefMetadata (path: string): ReferenceMetadata { + return ( + this.pathToRefMetadata.get(path) ?? { + id: this.getRef(path), + class: '', + refTitle: '' + } + ) + } +} diff --git a/packages/importer/src/huly/parsing.ts b/packages/importer/src/huly/parsing.ts new file mode 100644 index 0000000000..3846a1ceed --- /dev/null +++ b/packages/importer/src/huly/parsing.ts @@ -0,0 +1,32 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import * as fs from 'fs' +import * as yaml from 'js-yaml' + +export async function readYamlHeader (filePath: string): Promise { + const content = fs.readFileSync(filePath, 'utf8') + const match = content.match(/^---\n([\s\S]*?)\n---/) + if (match != null) { + return yaml.load(match[1]) + } + return {} +} + +export async function readMarkdownContent (filePath: string): Promise { + const content = fs.readFileSync(filePath, 'utf8') + const match = content.match(/^---\n[\s\S]*?\n---\n(.*)$/s) + return match != null ? match[1] : content +} diff --git a/packages/importer/src/importer/importer.ts b/packages/importer/src/importer/importer.ts index 34b8eadf68..3c3f0087b8 100644 --- a/packages/importer/src/importer/importer.ts +++ b/packages/importer/src/importer/importer.ts @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. // -import attachment, { Drawing, type Attachment } from '@hcengineering/attachment' +import attachment, { type Attachment, Drawing } from '@hcengineering/attachment' import chunter, { type ChatMessage } from '@hcengineering/chunter' import { Employee, type Person } from '@hcengineering/contact' import documents, { @@ -32,6 +32,7 @@ import documents, { import core, { type Account, type AttachedData, + AttachedDoc, type Class, type CollaborativeDoc, type Data, @@ -67,14 +68,20 @@ import tracker, { TimeReportDayType } from '@hcengineering/tracker' import view from '@hcengineering/view' +import { Props, UnifiedUpdate, UnifiedDoc, UnifiedFile, UnifiedMixin } from '../types' +import { Logger } from './logger' import { type MarkdownPreprocessor, NoopMarkdownPreprocessor } from './preprocessor' import { type FileUploader } from './uploader' -import { Logger } from './logger' - export interface ImportWorkspace { projectTypes?: ImportProjectType[] spaces?: ImportSpace[] attachments?: ImportAttachment[] + unifiedDocs?: { + docs?: UnifiedDoc>[] + mixins?: UnifiedMixin, Doc>[] + updates?: UnifiedUpdate>[] + files?: UnifiedFile[] + } } export interface ImportProjectType { @@ -206,7 +213,7 @@ export interface ImportControlledDocument extends ImportDoc { id: Ref metaId: Ref class: Ref> - template: Ref // todo: test (it was Ref) + template: Ref code?: string major: number minor: number @@ -236,6 +243,13 @@ export class WorkspaceImporter { ) {} public async performImport (): Promise { + if (this.workspaceData.unifiedDocs !== undefined) { + await this.importUnifiedDocs() + await this.importUnifiedMixins() + await this.importUnifiedUpdates() + await this.uploadFiles() + } + await this.importProjectTypes() await this.importSpaces() await this.importAttachments() @@ -909,7 +923,7 @@ export class WorkspaceImporter { documents.mixin.DocumentTemplate, undefined, parentProjectDocumentId, - templateId as unknown as Ref, // todo: suspisios place + templateId as unknown as Ref, template.docPrefix, template.code ?? 
'', template.title, @@ -1127,4 +1141,88 @@ export class WorkspaceImporter { return await this.client.createDoc(documents.class.ChangeControl, spaceId, changeControlData) } + + private async importUnifiedDocs (): Promise { + const { docs } = this.workspaceData?.unifiedDocs ?? {} + if (docs === undefined) return + + for (const doc of docs) { + await this.createUnifiedDoc(doc) + } + } + + private async createUnifiedDoc (unifiedDoc: UnifiedDoc>): Promise { + const { _class, props } = unifiedDoc + const _id = props._id ?? generateId>() + if (unifiedDoc.collabField !== undefined) { + const collabId = makeCollabId(_class, _id, unifiedDoc.collabField) + const collabContent = (await unifiedDoc.contentProvider?.()) ?? '' + const res = await this.createCollaborativeContent(_id, collabId, collabContent, props.space) + ;(props as any)[unifiedDoc.collabField] = res + } + + const hierarchy = this.client.getHierarchy() + if (hierarchy.isDerived(_class, core.class.AttachedDoc)) { + const { space, attachedTo, attachedToClass, collection, ...data } = props as unknown as Props + if ( + attachedTo === undefined || + space === undefined || + attachedToClass === undefined || + collection === undefined + ) { + throw new Error('Add collection step must have attachedTo, attachedToClass, collection and space') + } + await this.client.addCollection( + _class, + space, + attachedTo, + attachedToClass, + collection, + data, + _id as Ref | undefined + ) + } else { + await this.client.createDoc(_class, props.space, props as Data>, _id) + } + } + + private async importUnifiedUpdates (): Promise { + const { updates } = this.workspaceData?.unifiedDocs ?? {} + if (updates === undefined) return + + for (const update of updates) { + const { _class, _id, space, props } = update + await this.client.updateDoc(_class, space, _id, props) + } + } + + private async importUnifiedMixins (): Promise { + const { mixins } = this.workspaceData?.unifiedDocs ?? {} + if (mixins === undefined) return + + for (const mixin of mixins) { + const { _class, mixin: mixinClass, props } = mixin + const { _id, space, ...data } = props + await this.client.createMixin( + _id ?? generateId>(), + _class, + space, + mixinClass, + data as Data> + ) + } + } + + private async uploadFiles (): Promise { + const { files } = this.workspaceData?.unifiedDocs ?? {} + if (files === undefined) return + + for (const file of files) { + const id = file._id ?? generateId() + const uploadResult = await this.fileUploader.uploadFile(id, await file.blobProvider()) + if (!uploadResult.success) { + throw new Error('Failed to upload attachment file: ' + file.name) + } + } + } } diff --git a/packages/importer/src/types.ts b/packages/importer/src/types.ts new file mode 100644 index 0000000000..7a2f1c3907 --- /dev/null +++ b/packages/importer/src/types.ts @@ -0,0 +1,33 @@ +import { Class, Data, Doc, Mixin, Ref, Space, Blob as PlatformBlob, DocumentUpdate } from '@hcengineering/core' +export type Props = Data & Partial & { space: Ref } + +export interface UnifiedDoc { + _class: Ref> + props: Props + collabField?: string + contentProvider?: () => Promise +} + +export interface UnifiedMixin { + _class: Ref> + mixin: Ref> + props: Props +} + +export interface UnifiedUpdate { + _class: Ref> + _id: Ref + space: Ref + props: DocumentUpdate +} + +export interface UnifiedFile { + _id: Ref + name: string + type: string + size: number + blobProvider: blobProvider +} + +export type contentProvider = () => Promise +export type blobProvider = () => Promise
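
A minimal sketch of how the new `MetadataRegistry` and `parsing` helpers under `packages/importer/src/huly/` fit together when resolving the example-workspace cards added above. Only `MetadataRegistry`, `readYamlHeader`, and `readMarkdownContent` come from this diff; the relative import paths, the `card:class:Card` class string, and the `demo` wrapper are illustrative assumptions, not part of the change.

```ts
// Illustrative only — not part of this PR. Import paths and the class string are assumptions;
// the helpers themselves are the ones introduced in packages/importer/src/huly/ above.
import * as path from 'path'
import { MetadataRegistry } from './huly/metadata' // assumed relative import
import { readMarkdownContent, readYamlHeader } from './huly/parsing' // assumed relative import

async function demo (): Promise<void> {
  const registry = new MetadataRegistry()
  const workspace = 'dev/import-tool/docs/huly/example-workspace'
  const cardPath = path.resolve(workspace, 'Recipes/Classic Margherita Pizza.md')

  // getRef() is idempotent: the first call generates an id and later calls return the same one,
  // so every file that links to this card (e.g. via recommendedDesserts) resolves to one Ref.
  const ref = registry.getRef(cardPath)
  console.log(ref === registry.getRef(cardPath)) // true

  // Front matter (title, cookingTime, tags, recommendedDesserts, ...) is read by readYamlHeader;
  // readMarkdownContent returns the markdown body after the closing '---'.
  const header: any = await readYamlHeader(cardPath)
  const body = await readMarkdownContent(cardPath)

  // Registering reference metadata is what lets HulyMarkdownPreprocessor rewrite internal links
  // to this card into platform references. 'card:class:Card' is an assumed class id here.
  registry.setRefMetadata(cardPath, 'card:class:Card', header.title)
  console.log(registry.getRefMetadata(cardPath).refTitle, body.length)
}

void demo()
```

The `CardsProcessor` introduced above shares the same registry instance, so refs generated while reading the YAML metadata (master tags, tags, associations, enums) are the same refs later used for card relations, mixins, and blob lookups.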