mirror of https://github.com/hcengineering/platform.git
synced 2025-03-20 14:01:51 +00:00

Merge remote-tracking branch 'origin/develop'

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in commit 1e2c171ead

dev/.env (+1)
@@ -1 +1,2 @@
 STORAGE_CONFIG="minio|minio?accessKey=minioadmin&secretKey=minioadmin"
+MONGO_URL=mongodb://mongodb:27017?compressors=snappy
@@ -2,6 +2,15 @@ services:
   mongodb:
     image: 'mongo:7-jammy'
     container_name: mongodb
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    healthcheck:
+      test: echo "try { db.currentOp().ok } catch (err) { }" | mongosh --port 27017 --quiet
+      interval: 5s
+      timeout: 30s
+      start_period: 0s
+      start_interval: 1s
+      retries: 30
     environment:
       - PUID=1000
       - PGID=1000
@@ -52,11 +61,11 @@ services:
     environment:
       - ACCOUNT_PORT=3000
       - SERVER_SECRET=secret
-      - MONGO_URL=mongodb://mongodb:27017?compressors=snappy
-      - TRANSACTOR_URL=ws://transactor:3333;ws://localhost:3333
+      - MONGO_URL=${MONGO_URL}
+      - TRANSACTOR_URL=ws://host.docker.internal:3333;ws://localhost:3333
       - SES_URL=
       - STORAGE_CONFIG=${STORAGE_CONFIG}
-      - FRONT_URL=http://localhost:8087
+      - FRONT_URL=http://host.docker.internal:8087
       - RESERVED_DB_NAMES=telegram,gmail,github
       - MODEL_ENABLED=*
       - LAST_NAME_FIRST=true
@@ -76,8 +85,8 @@ services:
     environment:
       - COLLABORATOR_PORT=3078
       - SECRET=secret
-      - ACCOUNTS_URL=http://account:3000
-      - MONGO_URL=mongodb://mongodb:27017?compressors=snappy
+      - ACCOUNTS_URL=http://host.docker.internal:3000
+      - MONGO_URL=${MONGO_URL}
       - 'MONGO_OPTIONS={"appName":"collaborator","maxPoolSize":2}'
       - STORAGE_CONFIG=${STORAGE_CONFIG}
     restart: unless-stopped
@@ -96,11 +105,11 @@ services:
       - UV_THREADPOOL_SIZE=10
       - SERVER_PORT=8080
      - SERVER_SECRET=secret
-      - MONGO_URL=mongodb://mongodb:27017?compressors=snappy
+      - MONGO_URL=${MONGO_URL}
       - 'MONGO_OPTIONS={"appName":"front","maxPoolSize":1}'
       - ACCOUNTS_URL=http://localhost:3000
       - UPLOAD_URL=/files
-      - ELASTIC_URL=http://elastic:9200
+      - ELASTIC_URL=http://host.docker.internal:9200
       - GMAIL_URL=http://localhost:8088
       - CALENDAR_URL=http://localhost:8095
       - TELEGRAM_URL=http://localhost:8086
@@ -135,17 +144,17 @@ services:
       - SERVER_PORT=3333
       - SERVER_SECRET=secret
       - ENABLE_COMPRESSION=true
-      - ELASTIC_URL=http://elastic:9200
-      - MONGO_URL=mongodb://mongodb:27017?compressors=snappy
+      - ELASTIC_URL=http://host.docker.internal:9200
+      - MONGO_URL=${MONGO_URL}
       - 'MONGO_OPTIONS={"appName": "transactor", "maxPoolSize": 10}'
       - METRICS_CONSOLE=false
       - METRICS_FILE=metrics.txt
       - STORAGE_CONFIG=${STORAGE_CONFIG}
-      - REKONI_URL=http://rekoni:4004
+      - REKONI_URL=http://host.docker.internal:4004
       - FRONT_URL=http://localhost:8087
       # - APM_SERVER_URL=http://apm-server:8200
       - SES_URL=''
-      - ACCOUNTS_URL=http://account:3000
+      - ACCOUNTS_URL=http://host.docker.internal:3000
       - LAST_NAME_FIRST=true
       - ELASTIC_INDEX_NAME=local_storage_index
       - BRANDING_PATH=/var/cfg/branding.json
@@ -166,7 +175,7 @@ services:
       - 4005:4005
     environment:
       - SECRET=secret
-      - MONGO_URL=mongodb://mongodb:27017?compressors=snappy
+      - MONGO_URL=${MONGO_URL}
       - 'MONGO_OPTIONS={"appName":"print","maxPoolSize":1}'
       - STORAGE_CONFIG=${STORAGE_CONFIG}
     deploy:
@@ -183,11 +192,11 @@ services:
       - ../services/sign/pod-sign/debug/branding.json:/var/cfg/branding.json
     environment:
       - SECRET=secret
-      - MONGO_URL=mongodb://mongodb:27017
+      - MONGO_URL=${MONGO_URL}
       - 'MONGO_OPTIONS={"appName":"sign","maxPoolSize":1}'
       - MINIO_ENDPOINT=minio
       - MINIO_ACCESS_KEY=minioadmin
-      - ACCOUNTS_URL=http://account:3000
+      - ACCOUNTS_URL=http://host.docker.internal:3000
       - MINIO_SECRET_KEY=minioadmin
       - CERTIFICATE_PATH=/var/cfg/certificate.p12
       - SERVICE_ID=sign-service
@@ -204,10 +213,10 @@ services:
     environment:
       - SECRET=secret
       - PORT=4007
-      - MONGO_URL=mongodb://mongodb:27017
+      - MONGO_URL=${MONGO_URL}
       - 'MONGO_OPTIONS={"appName":"analytics","maxPoolSize":1}'
       - SERVICE_ID=analytics-collector-service
-      - ACCOUNTS_URL=http://account:3000
+      - ACCOUNTS_URL=http://host.docker.internal:3000
       - SUPPORT_WORKSPACE=support
     deploy:
       resources:
@@ -218,8 +227,8 @@ services:
     restart: unless-stopped
     environment:
       - SERVER_SECRET=secret
-      - MONGO_URL=mongodb://mongodb:27017
+      - MONGO_URL=${MONGO_URL}
-      - ACCOUNTS_URL=http://account:3000
+      - ACCOUNTS_URL=http://host.docker.internal:3000
       - SUPPORT_WORKSPACE=support
       - FIRST_NAME=Jolie
       - LAST_NAME=AI
@@ -236,11 +245,11 @@ services:
   # environment:
   #   - PORT=4020
   #   - BOT_TOKEN=token
-  #   - MONGO_URL=mongodb://mongodb:27017
+  #   - MONGO_URL=${MONGO_URL}
   #   - MONGO_DB=telegram-bot
   #   - SECRET=secret
   #   - DOMAIN=domain
-  #   - ACCOUNTS_URL=http://account:3000
+  #   - ACCOUNTS_URL=http://host.docker.internal:3000
   #   - SERVICE_ID=telegram-bot-service
   # deploy:
   #   resources:
@@ -107,7 +107,7 @@ import github, { githubId } from '@hcengineering/github'
 import '@hcengineering/github-assets'
 
 import { coreId } from '@hcengineering/core'
-import presentation, { parsePreviewConfig, presentationId } from '@hcengineering/presentation'
+import presentation, { loadServerConfig, parsePreviewConfig, presentationId } from '@hcengineering/presentation'
 
 import { setMetadata } from '@hcengineering/platform'
 import { setDefaultLanguage } from '@hcengineering/theme'
@@ -239,13 +239,12 @@ export async function configurePlatform() {
   })
   configureI18n()
 
-  const config: Config = await (await fetch(
+  const config: Config = await loadServerConfig(
     devConfigHuly
       ? '/config-huly.json' : (
         devConfigBold ? '/config-bold.json' : (
           devConfig ? '/config-dev.json' : '/config.json'))
   )
-  ).json()
   const branding: BrandingMap = config.BRANDING_URL !== undefined ? await (await fetch(config.BRANDING_URL)).json() : {}
   const myBranding = branding[window.location.host] ?? {}
 
@@ -90,7 +90,7 @@ import {
   restoreRecruitingTaskTypes
 } from './clean'
 import { changeConfiguration } from './configuration'
-import { fixJsonMarkup } from './markup'
+import { fixJsonMarkup, migrateMarkup } from './markup'
 import { fixMixinForeignAttributes, showMixinForeignAttributes } from './mixin'
 import { fixAccountEmails, renameAccount } from './renameAccount'
 import { moveFiles } from './storage'
@@ -1290,6 +1290,34 @@ export function devTool (
       })
     })
 
+  program
+    .command('migrate-markup')
+    .description('migrates collaborative markup to storage')
+    .option('-w, --workspace <workspace>', 'Selected workspace only', '')
+    .option('-c, --concurrency <concurrency>', 'Number of documents being processed concurrently', '10')
+    .action(async (cmd: { workspace: string, concurrency: string }) => {
+      const { mongodbUri } = prepareTools()
+      await withDatabase(mongodbUri, async (db, client) => {
+        await withStorage(mongodbUri, async (adapter) => {
+          const workspaces = await listWorkspacesPure(db)
+          for (const workspace of workspaces) {
+            if (cmd.workspace !== '' && workspace.workspace !== cmd.workspace) {
+              continue
+            }
+
+            const wsId = getWorkspaceId(workspace.workspace)
+            const endpoint = await getTransactorEndpoint(generateToken(systemAccountEmail, wsId), 'external')
+
+            console.log('processing workspace', workspace.workspace)
+
+            await migrateMarkup(toolCtx, adapter, wsId, client, endpoint, parseInt(cmd.concurrency))
+
+            console.log('...done', workspace.workspace)
+          }
+        })
+      })
+    })
+
   program
     .command('remove-duplicates-ids <workspaces>')
     .description('remove duplicates ids for futue migration')
@@ -1,19 +1,23 @@
+import { saveCollaborativeDoc } from '@hcengineering/collaboration'
 import core, {
   type AnyAttribute,
   type Class,
   type Client as CoreClient,
   type Doc,
   type Domain,
+  type Hierarchy,
   type MeasureContext,
   type Ref,
   type WorkspaceId,
+  RateLimiter,
+  collaborativeDocParse,
   makeCollaborativeDoc
 } from '@hcengineering/core'
 import { getMongoClient, getWorkspaceDB } from '@hcengineering/mongo'
 import { type StorageAdapter } from '@hcengineering/server-core'
 import { connect } from '@hcengineering/server-tool'
-import { jsonToText } from '@hcengineering/text'
-import { type Db } from 'mongodb'
+import { jsonToText, markupToYDoc } from '@hcengineering/text'
+import { type Db, type FindCursor, type MongoClient } from 'mongodb'
 
 export async function fixJsonMarkup (
   ctx: MeasureContext,
@@ -110,3 +114,105 @@ async function processFixJsonMarkupFor (
 
   console.log('...processed', docs.length)
 }
+
+export async function migrateMarkup (
+  ctx: MeasureContext,
+  storageAdapter: StorageAdapter,
+  workspaceId: WorkspaceId,
+  client: MongoClient,
+  transactorUrl: string,
+  concurrency: number
+): Promise<void> {
+  const connection = (await connect(transactorUrl, workspaceId, undefined, {
+    mode: 'backup'
+  })) as unknown as CoreClient
+
+  const hierarchy = connection.getHierarchy()
+
+  const workspaceDb = client.db(workspaceId.name)
+
+  try {
+    const classes = hierarchy.getDescendants(core.class.Doc)
+    for (const _class of classes) {
+      const domain = hierarchy.findDomain(_class)
+      if (domain === undefined) continue
+
+      const allAttributes = hierarchy.getAllAttributes(_class)
+      const attributes = Array.from(allAttributes.values()).filter((attribute) => {
+        return hierarchy.isDerived(attribute.type._class, 'core:class:TypeCollaborativeMarkup' as Ref<Class<Doc>>)
+      })
+
+      if (attributes.length === 0) continue
+      if (hierarchy.isMixin(_class) && attributes.every((p) => p.attributeOf !== _class)) continue
+
+      const collection = workspaceDb.collection(domain)
+
+      const filter = hierarchy.isMixin(_class) ? { [_class]: { $exists: true } } : { _class }
+
+      const count = await collection.countDocuments(filter)
+      const iterator = collection.find<Doc>(filter)
+
+      try {
+        console.log('processing', _class, '->', count)
+        await processMigrateMarkupFor(ctx, hierarchy, storageAdapter, workspaceId, attributes, iterator, concurrency)
+      } finally {
+        await iterator.close()
+      }
+    }
+  } finally {
+    await connection.close()
+  }
+}
+
+async function processMigrateMarkupFor (
+  ctx: MeasureContext,
+  hierarchy: Hierarchy,
+  storageAdapter: StorageAdapter,
+  workspaceId: WorkspaceId,
+  attributes: AnyAttribute[],
+  iterator: FindCursor<Doc>,
+  concurrency: number
+): Promise<void> {
+  const rateLimiter = new RateLimiter(concurrency)
+
+  let processed = 0
+
+  while (true) {
+    const doc = await iterator.next()
+    if (doc === null) break
+
+    const timestamp = Date.now()
+    const revisionId = `${timestamp}`
+
+    await rateLimiter.exec(async () => {
+      for (const attribute of attributes) {
+        const collaborativeDoc = makeCollaborativeDoc(doc._id, attribute.name, revisionId)
+        const { documentId } = collaborativeDocParse(collaborativeDoc)
+
+        const value = hierarchy.isMixin(attribute.attributeOf)
+          ? ((doc as any)[attribute.attributeOf]?.[attribute.name] as string)
+          : ((doc as any)[attribute.name] as string)
+
+        if (value != null && value.startsWith('{')) {
+          const blob = await storageAdapter.stat(ctx, workspaceId, documentId)
+          // only for documents not in storage
+          if (blob === undefined) {
+            const ydoc = markupToYDoc(value, attribute.name)
+            await saveCollaborativeDoc(storageAdapter, workspaceId, collaborativeDoc, ydoc, ctx)
+          }
+        }
+      }
+    })
+
+    processed += 1
+
+    if (processed % 100 === 0) {
+      await rateLimiter.waitProcessing()
+      console.log('...processing', processed)
+    }
+  }
+
+  await rateLimiter.waitProcessing()
+
+  console.log('processed', processed)
+}
@@ -14,22 +14,22 @@
 //
 
 import { type Blob, type MeasureContext, type WorkspaceId, RateLimiter } from '@hcengineering/core'
-import { type StorageAdapterEx } from '@hcengineering/server-core'
+import { type StorageAdapter, type StorageAdapterEx } from '@hcengineering/server-core'
 import { PassThrough } from 'stream'
 
+export interface MoveFilesParams {
+  blobSizeLimitMb: number
+  concurrency: number
+}
+
 export async function moveFiles (
   ctx: MeasureContext,
   workspaceId: WorkspaceId,
   exAdapter: StorageAdapterEx,
-  params: {
-    blobSizeLimitMb: number
-    concurrency: number
-  }
+  params: MoveFilesParams
 ): Promise<void> {
   if (exAdapter.adapters === undefined) return
 
-  let count = 0
-
   console.log('start', workspaceId.name)
 
   // We assume that the adapter moves all new files to the default adapter
@@ -38,74 +38,125 @@ export async function moveFiles (
 
   for (const [name, adapter] of exAdapter.adapters.entries()) {
     if (name === target) continue
 
     console.log('moving from', name, 'limit', params.blobSizeLimitMb, 'concurrency', params.concurrency)
 
+    // we attempt retry the whole process in case of failure
+    // files that were already moved will be skipped
+    await retryOnFailure(ctx, 5, async () => {
+      await processAdapter(ctx, exAdapter, adapter, workspaceId, params)
+    })
+  }
+
+  console.log('...done', workspaceId.name)
+}
+
+async function processAdapter (
+  ctx: MeasureContext,
+  exAdapter: StorageAdapterEx,
+  adapter: StorageAdapter,
+  workspaceId: WorkspaceId,
+  params: MoveFilesParams
+): Promise<void> {
+  const target = exAdapter.defaultAdapter
+
   let time = Date.now()
+  let processedCnt = 0
+  let processedBytes = 0
+  let skippedCnt = 0
+  let movedCnt = 0
+  let movedBytes = 0
+  let batchBytes = 0
 
   const rateLimiter = new RateLimiter(params.concurrency)
 
   const iterator = await adapter.listStream(ctx, workspaceId)
+  try {
     while (true) {
       const data = await iterator.next()
       if (data === undefined) break
 
-      const blob = await exAdapter.stat(ctx, workspaceId, data._id)
-      if (blob === undefined) continue
-      if (blob.provider === target) continue
+      const blob =
+        (await exAdapter.stat(ctx, workspaceId, data._id)) ?? (await adapter.stat(ctx, workspaceId, data._id))
 
-      if (blob.size > params.blobSizeLimitMb * 1024 * 1024) {
-        console.log('skipping large blob', name, data._id, Math.round(blob.size / 1024 / 1024))
+      if (blob === undefined) {
+        console.error('blob not found', data._id)
         continue
       }
 
+      if (blob.provider !== target) {
+        if (blob.size <= params.blobSizeLimitMb * 1024 * 1024) {
           await rateLimiter.exec(async () => {
             try {
               await retryOnFailure(
                 ctx,
                 5,
                 async () => {
-                  await moveFile(ctx, exAdapter, workspaceId, blob)
+                  await processFile(ctx, exAdapter, adapter, workspaceId, blob)
                 },
                 50
               )
+              movedCnt += 1
+              movedBytes += blob.size
+              batchBytes += blob.size
             } catch (err) {
-              console.error('failed to process blob', name, data._id, err)
+              console.error('failed to process blob', data._id, err)
             }
           })
+        } else {
+          skippedCnt += 1
+          console.log('skipping large blob', data._id, Math.round(blob.size / 1024 / 1024))
+        }
+      }
 
-      count += 1
-      if (count % 100 === 0) {
+      processedCnt += 1
+      processedBytes += blob.size
 
+      if (processedCnt % 100 === 0) {
         await rateLimiter.waitProcessing()
+
         const duration = Date.now() - time
+
+        console.log(
+          '...processed',
+          processedCnt,
+          Math.round(processedBytes / 1024 / 1024) + 'MB',
+          'moved',
+          movedCnt,
+          Math.round(movedBytes / 1024 / 1024) + 'MB',
+          '+' + Math.round(batchBytes / 1024 / 1024) + 'MB',
+          'skipped',
+          skippedCnt,
+          Math.round(duration / 1000) + 's'
+        )
+
+        batchBytes = 0
         time = Date.now()
-        console.log('...moved: ', count, Math.round(duration / 1000))
       }
     }
 
     await rateLimiter.waitProcessing()
+  } finally {
     await iterator.close()
   }
-
-  console.log('...done', workspaceId.name, count)
 }
 
-async function moveFile (
+async function processFile (
   ctx: MeasureContext,
   exAdapter: StorageAdapterEx,
+  adapter: StorageAdapter,
   workspaceId: WorkspaceId,
   blob: Blob
 ): Promise<void> {
-  const readable = await exAdapter.get(ctx, workspaceId, blob._id)
+  const readable = await adapter.get(ctx, workspaceId, blob._id)
   try {
     readable.on('end', () => {
       readable.destroy()
     })
     const stream = readable.pipe(new PassThrough())
     await exAdapter.put(ctx, workspaceId, blob._id, stream, blob.contentType, blob.size)
-  } catch (err) {
+  } finally {
     readable.destroy()
-    throw err
   }
 }
 
@@ -115,18 +166,19 @@ async function retryOnFailure<T> (
   op: () => Promise<T>,
   delay: number = 0
 ): Promise<T> {
-  let error: any
+  let lastError: any
   while (retries > 0) {
     retries--
     try {
       return await op()
     } catch (err: any) {
-      error = err
+      console.error(err)
+      lastError = err
       ctx.error('error', { err, retries })
       if (retries !== 0 && delay > 0) {
        await new Promise((resolve) => setTimeout(resolve, delay))
      }
    }
  }
-  throw error
+  throw lastError
 }
@@ -165,16 +165,20 @@ async function migrateCollaborativeContentToStorage (client: MigrationClient): P
     const domain = hierarchy.findDomain(_class)
     if (domain === undefined) continue
 
-    const attributes = hierarchy.getAllAttributes(_class)
-    const filtered = Array.from(attributes.values()).filter((attribute) => {
+    const allAttributes = hierarchy.getAllAttributes(_class)
+    const attributes = Array.from(allAttributes.values()).filter((attribute) => {
       return hierarchy.isDerived(attribute.type._class, core.class.TypeCollaborativeDoc)
     })
-    if (filtered.length === 0) continue
 
-    const iterator = await client.traverse(domain, { _class })
+    if (attributes.length === 0) continue
+    if (hierarchy.isMixin(_class) && attributes.every((p) => p.attributeOf !== _class)) continue
+
+    const query = hierarchy.isMixin(_class) ? { [_class]: { $exists: true } } : { _class }
+
+    const iterator = await client.traverse(domain, query)
     try {
       console.log('processing', _class)
-      await processMigrateContentFor(ctx, domain, filtered, client, storageAdapter, iterator)
+      await processMigrateContentFor(ctx, domain, attributes, client, storageAdapter, iterator)
     } finally {
       await iterator.close()
     }
@@ -189,6 +193,8 @@ async function processMigrateContentFor (
   storageAdapter: StorageAdapter,
   iterator: MigrationIterator<Doc>
 ): Promise<void> {
+  const hierarchy = client.hierarchy
+
   const rateLimiter = new RateLimiter(10)
 
   let processed = 0
@@ -211,7 +217,14 @@ async function processMigrateContentFor (
       for (const attribute of attributes) {
         const collaborativeDoc = makeCollaborativeDoc(doc._id, attribute.name, revisionId)
 
-        const value = (doc as any)[attribute.name] as string
+        const value = hierarchy.isMixin(attribute.attributeOf)
+          ? ((doc as any)[attribute.attributeOf]?.[attribute.name] as string)
+          : ((doc as any)[attribute.name] as string)
+
+        const attributeName = hierarchy.isMixin(attribute.attributeOf)
+          ? `${attribute.attributeOf}.${attribute.name}`
+          : attribute.name
+
         if (value != null && value.startsWith('{')) {
           const { documentId } = collaborativeDocParse(collaborativeDoc)
           const blob = await storageAdapter.stat(ctx, client.workspaceId, documentId)
@@ -221,9 +234,9 @@ async function processMigrateContentFor (
             await saveCollaborativeDoc(storageAdapter, client.workspaceId, collaborativeDoc, ydoc, ctx)
           }
 
-          update[attribute.name] = collaborativeDoc
-        } else if (value == null) {
-          update[attribute.name] = makeCollaborativeDoc(doc._id, attribute.name, revisionId)
+          update[attributeName] = collaborativeDoc
+        } else if (value == null || value === '') {
+          update[attributeName] = collaborativeDoc
         }
       }
 
@@ -693,3 +693,28 @@ export function setDownloadProgress (percent: number): void {
 
   upgradeDownloadProgress.set(Math.round(percent))
 }
+
+export async function loadServerConfig (url: string): Promise<any> {
+  let retries = 5
+  let res: Response | undefined
+
+  do {
+    try {
+      res = await fetch(url)
+      break
+    } catch (e: any) {
+      retries--
+      if (retries === 0) {
+        throw new Error(`Failed to load server config: ${e}`)
+      }
+      await new Promise((resolve) => setTimeout(resolve, 1000 * (5 - retries)))
+    }
+  } while (retries > 0)
+
+  if (res === undefined) {
+    // In theory should never get here
+    throw new Error('Failed to load server config')
+  }
+
+  return await res.json()
+}
@@ -11,10 +11,15 @@ import core, {
 } from '@hcengineering/core'
 import login, { loginId } from '@hcengineering/login'
 import { getMetadata, getResource, setMetadata } from '@hcengineering/platform'
-import presentation, { closeClient, refreshClient, setClient, setPresentationCookie } from '@hcengineering/presentation'
+import presentation, {
+  closeClient,
+  loadServerConfig,
+  refreshClient,
+  setClient,
+  setPresentationCookie
+} from '@hcengineering/presentation'
 import { fetchMetadataLocalStorage, getCurrentLocation, navigate, setMetadataLocalStorage } from '@hcengineering/ui'
 import { writable } from 'svelte/store'
 
 export const versionError = writable<string | undefined>(undefined)
 const versionStorageKey = 'last_server_version'
 
@@ -113,7 +118,7 @@ export async function connect (title: string): Promise<Client | undefined> {
   const frontUrl = getMetadata(presentation.metadata.FrontUrl) ?? ''
   const currentFrontVersion = getMetadata(presentation.metadata.FrontVersion)
   if (currentFrontVersion !== undefined) {
-    const frontConfig = await (await fetch(concatLink(frontUrl, '/config.json'))).json()
+    const frontConfig = await loadServerConfig(concatLink(frontUrl, '/config.json'))
     if (frontConfig?.version !== undefined && frontConfig.version !== currentFrontVersion) {
       location.reload()
     }
@@ -17,6 +17,7 @@ import login, { loginId } from '@hcengineering/login'
 import { broadcastEvent, getMetadata, getResource, setMetadata } from '@hcengineering/platform'
 import presentation, {
   closeClient,
+  loadServerConfig,
   purgeClient,
   refreshClient,
   setClient,
@@ -221,7 +222,7 @@ export async function connect (title: string): Promise<Client | undefined> {
   const frontUrl = getMetadata(presentation.metadata.FrontUrl) ?? ''
   const currentFrontVersion = getMetadata(presentation.metadata.FrontVersion)
   if (currentFrontVersion !== undefined) {
-    const frontConfig = await (await fetch(concatLink(frontUrl, '/config.json'))).json()
+    const frontConfig = await loadServerConfig(concatLink(frontUrl, '/config.json'))
     if (frontConfig?.version !== undefined && frontConfig.version !== currentFrontVersion) {
       location.reload()
     }
@@ -75,7 +75,6 @@ import { createHash } from 'crypto'
 import {
   type AbstractCursor,
   type AnyBulkWriteOperation,
-  type BulkWriteResult,
   type Collection,
   type Db,
   type Document,
@@ -1107,57 +1106,6 @@ class MongoAdapter extends MongoAdapterBase {
       }
     }
 
-  bulkOps = new Map<Domain, AnyBulkWriteOperation<Doc>[]>()
-
-  async _pushBulk (ctx: MeasureContext): Promise<void> {
-    const bulk = Array.from(this.bulkOps.entries())
-    this.bulkOps.clear()
-    if (bulk.length === 0) {
-      return
-    }
-    const promises: Promise<BulkWriteResult>[] = []
-    for (const [domain, ops] of bulk) {
-      if (ops === undefined || ops.length === 0) {
-        continue
-      }
-      const coll = this.db.collection<Doc>(domain)
-
-      promises.push(
-        addOperation(
-          ctx,
-          'bulk-write',
-          { domain, operations: ops.length },
-          async (ctx) =>
-            await ctx.with(
-              'bulk-write',
-              { domain },
-              () =>
-                coll.bulkWrite(ops, {
-                  ordered: false
-                }),
-              {
-                domain,
-                operations: ops.length
-              }
-            )
-        )
-      )
-    }
-    await Promise.all(promises)
-  }
-
-  async pushBulk (ctx: MeasureContext, domain: Domain, ops: AnyBulkWriteOperation<Doc>[]): Promise<void> {
-    const existing = this.bulkOps.get(domain)
-    if (existing !== undefined) {
-      existing.push(...ops)
-    } else {
-      this.bulkOps.set(domain, ops)
-    }
-    // We need to wait next cycle to send request
-    await new Promise<void>((resolve) => setImmediate(resolve))
-    await this._pushBulk(ctx)
-  }
-
   async tx (ctx: MeasureContext, ...txes: Tx[]): Promise<TxResult[]> {
     const result: TxResult[] = []
 
@@ -1171,6 +1119,7 @@ class MongoAdapter extends MongoAdapterBase {
 
     const stTime = Date.now()
     const st = Date.now()
+    let promises: Promise<any>[] = []
    for (const [domain, txs] of byDomain) {
      if (domain === undefined) {
        continue
@@ -1227,9 +1176,37 @@ class MongoAdapter extends MongoAdapterBase {
      }
 
      if (ops.length > 0) {
-        await this.pushBulk(ctx, domain, ops)
+        if (ops === undefined || ops.length === 0) {
+          continue
+        }
+        const coll = this.db.collection<Doc>(domain)
+
+        promises.push(
+          addOperation(
+            ctx,
+            'bulk-write',
+            { domain, operations: ops.length },
+            async (ctx) =>
+              await ctx.with(
+                'bulk-write',
+                { domain },
+                () =>
+                  coll.bulkWrite(ops, {
+                    ordered: false
+                  }),
+                {
+                  domain,
+                  operations: ops.length
+                }
+              )
+          )
+        )
      }
      if (domainBulk.findUpdate.size > 0) {
+        if (promises.length > 0) {
+          await Promise.all(promises)
+          promises = []
+        }
        const coll = this.db.collection<Doc>(domain)
 
        await ctx.with(
@@ -1255,6 +1232,10 @@ class MongoAdapter extends MongoAdapterBase {
      }
 
      if (domainBulk.raw.length > 0) {
+        if (promises.length > 0) {
+          await Promise.all(promises)
+          promises = []
+        }
        await ctx.with(
          'raw',
          {},
@@ -1270,6 +1251,9 @@ class MongoAdapter extends MongoAdapterBase {
        )
      }
    }
+    if (promises.length > 0) {
+      await Promise.all(promises)
+    }
    return result
  }
 
@@ -1511,17 +1495,12 @@ class MongoTxAdapter extends MongoAdapterBase implements TxAdapter {
    await this._db.init(DOMAIN_TX)
  }
 
-  txBulk: Tx[] = []
-
-  async _bulkTx (ctx: MeasureContext): Promise<void> {
-    const txes = this.txBulk
-    this.txBulk = []
-
-    if (txes.length === 0) {
-      return
+  override async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
+    if (tx.length === 0) {
+      return []
    }
 
-    const opName = txes.length === 1 ? 'tx-one' : 'tx'
+    const opName = tx.length === 1 ? 'tx-one' : 'tx'
    await addOperation(
      ctx,
      opName,
@@ -1532,31 +1511,20 @@ class MongoTxAdapter extends MongoAdapterBase implements TxAdapter {
      { domain: 'tx' },
      () =>
        this.txCollection().insertMany(
-          txes.map((it) => translateDoc(it)),
+          tx.map((it) => translateDoc(it)),
          {
            ordered: false
          }
        ),
      {
-        count: txes.length
+        count: tx.length
      }
    ),
-      { domain: 'tx', count: txes.length }
+      { domain: 'tx', count: tx.length }
    )
    ctx.withSync('handleEvent', {}, () => {
-      this.handleEvent(DOMAIN_TX, 'add', txes.length)
+      this.handleEvent(DOMAIN_TX, 'add', tx.length)
    })
-  }
-
-  override async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
-    if (tx.length === 0) {
-      return []
-    }
-    this.txBulk.push(...tx)
-
-    // We need to wait next cycle to send request
-    await new Promise<void>((resolve) => setImmediate(resolve))
-    await this._bulkTx(ctx)
    return []
  }
 
@@ -3,24 +3,17 @@
 //
 -->
 <script lang="ts">
-  import { Ref, WithLookup, groupByArray } from '@hcengineering/core'
-  import {
-    GithubPullRequestReviewState,
-    GithubReview,
-    GithubReviewComment,
-    GithubReviewThread
-  } from '@hcengineering/github'
+  import { Ref, WithLookup } from '@hcengineering/core'
+  import { GithubPullRequestReviewState, GithubReview } from '@hcengineering/github'
 
   import { ActivityMessageHeader, ActivityMessageTemplate } from '@hcengineering/activity-resources'
   import { Person, PersonAccount } from '@hcengineering/contact'
   import { personAccountByIdStore, personByIdStore } from '@hcengineering/contact-resources'
   import { IntlString } from '@hcengineering/platform'
-  import { MessageViewer, createQuery } from '@hcengineering/presentation'
-  import { Component, PaletteColorIndexes, getPlatformColor, themeStore } from '@hcengineering/ui'
-  import diffview from '@hcengineering/diffview'
-  import github from '../../plugin'
-  import ReviewCommentPresenter from './ReviewCommentPresenter.svelte'
+  import { MessageViewer } from '@hcengineering/presentation'
   import { isEmptyMarkup } from '@hcengineering/text'
+  import { PaletteColorIndexes, getPlatformColor, themeStore } from '@hcengineering/ui'
+  import github from '../../plugin'
 
   export let value: WithLookup<GithubReview>
   export let showNotify: boolean = false
@@ -68,7 +68,11 @@
     )
   }
   async function changeResolution (): Promise<void> {
-    await getClient().update(value, { isResolved: !value.isResolved, resolvedBy: null })
+    if (value.isResolved) {
+      await getClient().update(value, { isResolved: false, resolvedBy: null })
+    } else {
+      await getClient().update(value, { isResolved: true, resolvedBy: getCurrentAccount()._id })
+    }
   }
 
   const toRefPersonAccount = (account: Ref<Account>): Ref<PersonAccount> => account as Ref<PersonAccount>
@@ -1057,14 +1057,15 @@ export abstract class IssueSyncManagerBase {
         }
         break
       case task.statusCategory.Won:
-        if (issueExternal.state !== 'CLOSED') {
+        if (issueExternal.state !== 'CLOSED' || issueExternal.stateReason !== 'COMPLETED') {
           issueUpdate.state = 'CLOSED'
+          issueUpdate.stateReason = 'COMPLETED'
         }
         break
       case task.statusCategory.Lost:
-        if (issueExternal.state !== 'CLOSED') {
+        if (issueExternal.state !== 'CLOSED' || issueExternal.stateReason !== 'NOT_PLANNED') {
           issueUpdate.state = 'CLOSED'
-          // issueUpdate.stateReason = 'not_planed'// Not supported change to github
+          issueUpdate.stateReason = 'not_planed' // Not supported change to github
         }
         break
     }
@@ -21,9 +21,6 @@ import core, {
   generateId,
   makeCollaborativeDoc
 } from '@hcengineering/core'
-import task, { TaskType, calcRank } from '@hcengineering/task'
-import tracker, { Issue, IssuePriority } from '@hcengineering/tracker'
-import { Issue as GithubIssue, IssuesEvent, ProjectsV2ItemEvent } from '@octokit/webhooks-types'
 import github, {
   DocSyncInfo,
   GithubIntegrationRepository,
@@ -32,6 +29,9 @@ import github, {
   IntegrationRepositoryData,
   GithubIssue as TGithubIssue
 } from '@hcengineering/github'
+import task, { TaskType, calcRank } from '@hcengineering/task'
+import tracker, { Issue, IssuePriority } from '@hcengineering/tracker'
+import { Issue as GithubIssue, IssuesEvent, ProjectsV2ItemEvent } from '@octokit/webhooks-types'
 import { Octokit } from 'octokit'
 import config from '../config'
 import {
@@ -667,7 +667,7 @@ export class IssueSyncManager extends IssueSyncManagerBase implements DocSyncMan
     okit: Octokit,
     account: Ref<Account>
   ): Promise<boolean> {
-    const { state, body, ...issueUpdate } = await this.collectIssueUpdate(
+    const { state, stateReason, body, ...issueUpdate } = await this.collectIssueUpdate(
       info,
       existing,
       platformUpdate,
@@ -683,6 +683,41 @@ export class IssueSyncManager extends IssueSyncManagerBase implements DocSyncMan
     const hasFieldStateChanges = Object.keys(issueUpdate).length > 0 || state !== undefined
     // We should allow modification from user.
 
+    const closeIssue = async (): Promise<void> => {
+      await okit?.graphql(
+        `
+        mutation closeIssue($issue: ID!) {
+          closeIssue(input: {
+            issueId: $issue,
+            stateReason: ${stateReason === 'not_planed' ? 'NOT_PLANNED' : 'COMPLETED'}
+          }) {
+            issue {
+              id
+              updatedAt
+            }
+          }
+        }`,
+        { issue: issueExternal.id }
+      )
+    }
+
+    const reopenIssue = async (): Promise<void> => {
+      await okit?.graphql(
+        `
+        mutation reopenIssue($issue: ID!) {
+          reopenIssue(input: {
+            issueId: $issue
+          }) {
+            issue {
+              id
+              updatedAt
+            }
+          }
+        }`,
+        { issue: issueExternal.id }
+      )
+    }
+
     if (hasFieldStateChanges || body !== undefined) {
       if (body !== undefined && !isLocked) {
         await this.ctx.withLog(
@@ -696,12 +731,15 @@ export class IssueSyncManager extends IssueSyncManagerBase implements DocSyncMan
               workspace: this.provider.getWorkspaceId().name
             })
             if (isGHWriteAllowed()) {
+              if (state === 'OPEN') {
+                // We need to call re-open issue
+                await reopenIssue()
+              }
               await okit?.graphql(
                 `
             mutation updateIssue($issue: ID!, $body: String! ) {
               updateIssue(input: {
                 id: $issue,
-                ${state !== undefined ? `state: ${state as string}` : ''}
                 ${gqlp(issueUpdate)},
                 body: $body
               }) {
@@ -713,6 +751,9 @@ export class IssueSyncManager extends IssueSyncManagerBase implements DocSyncMan
            }`,
                { issue: issueExternal.id, body }
              )
+              if (state === 'CLOSED') {
+                await closeIssue()
+              }
            }
          },
          { url: issueExternal.url, id: existing._id }
@@ -725,12 +766,17 @@ export class IssueSyncManager extends IssueSyncManagerBase implements DocSyncMan
          async () => {
            this.ctx.info('update fields', { ...issueUpdate, workspace: this.provider.getWorkspaceId().name })
            if (isGHWriteAllowed()) {
+              const hasOtherChanges = Object.keys(issueUpdate).length > 0
+              if (state === 'OPEN') {
+                // We need to call re-open issue
+                await reopenIssue()
+              }
+              if (hasOtherChanges) {
              await okit?.graphql(
                `
            mutation updateIssue($issue: ID!) {
              updateIssue(input: {
                id: $issue,
-                ${state !== undefined ? `state: ${state as string}` : ''}
                ${gqlp(issueUpdate)}
              }) {
                issue {
@@ -742,6 +788,10 @@ export class IssueSyncManager extends IssueSyncManagerBase implements DocSyncMan
                { issue: issueExternal.id }
              )
            }
+              if (state === 'CLOSED') {
+                await closeIssue()
+              }
+            }
          },
          { url: issueExternal.url }
        )
@@ -992,7 +992,7 @@ export class PullRequestSyncManager extends IssueSyncManagerBase implements DocS
     okit: Octokit,
     account: Ref<Account>
   ): Promise<boolean> {
-    let { state, body, ...issueUpdate } = await this.collectIssueUpdate(
+    let { state, stateReason, body, ...issueUpdate } = await this.collectIssueUpdate(
       info,
       existing,
       platformUpdate,
@@ -204,7 +204,9 @@ export class ReviewThreadSyncManager implements DocSyncManager {
       case 'resolved':
       case 'unresolved': {
         const isResolved = event.action === 'resolved'
-        const reviewData = await this.client.findOne(github.class.DocSyncInfo, { url: event.thread.node_id })
+        const reviewData = await this.client.findOne(github.class.DocSyncInfo, {
+          url: event.thread.node_id.toLocaleLowerCase()
+        })
+
         if (reviewData !== undefined) {
           const reviewObj: GithubReviewThread | undefined = await this.client.findOne<GithubReviewThread>(
@@ -225,12 +227,12 @@ export class ReviewThreadSyncManager implements DocSyncManager {
             },
             lastModified
           )
-          await this.client.update(
+          await this.client.diffUpdate(
             reviewObj,
             {
-              isResolved
+              isResolved,
+              resolvedBy: account
             },
-            false,
             lastModified,
             account
           )
tests/sanity/tests/documents/documents-content.spec.ts (new file, +143)
@@ -0,0 +1,143 @@
+import { test, type Page, expect } from '@playwright/test'
+import {
+  generateId,
+  getTimeForPlanner,
+  generateUser,
+  createAccountAndWorkspace,
+  createAccount,
+  getInviteLink,
+  generateTestData,
+  getSecondPageByInvite
+} from '../utils'
+import { NewDocument, NewTeamspace } from '../model/documents/types'
+import { LeftSideMenuPage } from '../model/left-side-menu-page'
+import { DocumentsPage } from '../model/documents/documents-page'
+import { DocumentContentPage } from '../model/documents/document-content-page'
+import { PlanningNavigationMenuPage } from '../model/planning/planning-navigation-menu-page'
+import { PlanningPage } from '../model/planning/planning-page'
+import { SignUpData } from '../model/common-types'
+import { TestData } from '../chat/types'
+
+const retryOptions = { intervals: [1000, 1500, 2500], timeout: 60000 }
+
+test.describe('Content in the Documents tests', () => {
+  let testData: TestData
+  let newUser2: SignUpData
+  let testTeamspace: NewTeamspace
+  let testDocument: NewDocument
+
+  let leftSideMenuPage: LeftSideMenuPage
+  let documentsPage: DocumentsPage
+  let documentContentPage: DocumentContentPage
+
+  let secondPage: Page
+  let leftSideMenuSecondPage: LeftSideMenuPage
+  let documentsSecondPage: DocumentsPage
+  let documentContentSecondPage: DocumentContentPage
+
+  test.beforeEach(async ({ browser, page, request }) => {
+    leftSideMenuPage = new LeftSideMenuPage(page)
+    documentsPage = new DocumentsPage(page)
+    documentContentPage = new DocumentContentPage(page)
+    testTeamspace = {
+      title: `Teamspace-${generateId()}`,
+      description: 'Teamspace description',
+      autoJoin: true
+    }
+    testDocument = {
+      title: `Document-${generateId()}`,
+      space: testTeamspace.title
+    }
+
+    testData = generateTestData()
+    newUser2 = generateUser()
+    await createAccountAndWorkspace(page, request, testData)
+    await createAccount(request, newUser2)
+
+    const linkText = await getInviteLink(page)
+    await leftSideMenuPage.clickDocuments()
+    await documentsPage.checkTeamspaceNotExist(testTeamspace.title)
+    await documentsPage.createNewTeamspace(testTeamspace)
+    secondPage = await getSecondPageByInvite(browser, linkText, newUser2)
+
+    leftSideMenuSecondPage = new LeftSideMenuPage(secondPage)
+    documentsSecondPage = new DocumentsPage(secondPage)
+    documentContentSecondPage = new DocumentContentPage(secondPage)
+    await documentsPage.clickOnButtonCreateDocument()
+    await documentsPage.createDocument(testDocument)
+    await documentsPage.openDocument(testDocument.title)
+    await documentContentPage.checkDocumentTitle(testDocument.title)
+  })
+
+  test('ToDos in the Document', async () => {
+    const contents: string[] = ['work', 'meet up']
+    let content: string = ''
+
+    for (let i = 0; i < contents.length; i++) {
+      content = await documentContentPage.addContentToTheNewLine(`${i === 0 ? '[] ' : ''}${contents[i]}`)
+      await documentContentPage.checkContent(content)
+    }
+    for (const line of contents) {
+      await documentContentPage.assignToDo(`${newUser2.lastName} ${newUser2.firstName}`, line)
+    }
+
+    await leftSideMenuSecondPage.clickDocuments()
+    await documentsSecondPage.openTeamspace(testDocument.space)
+    await documentsSecondPage.openDocument(testDocument.title)
+    await documentContentSecondPage.checkDocumentTitle(testDocument.title)
+    await documentContentSecondPage.checkContent(content)
+    await leftSideMenuSecondPage.clickPlanner()
+
+    const planningNavigationMenuPage = new PlanningNavigationMenuPage(secondPage)
+    await planningNavigationMenuPage.clickOnButtonToDoAll()
+    const planningPage = new PlanningPage(secondPage)
+    const time: string = getTimeForPlanner()
+    await planningPage.dragToCalendar(contents[0], 1, time)
+    await planningPage.dragToCalendar(contents[1], 1, time, true)
+    await planningPage.checkInSchedule(contents[0])
+    await planningPage.checkInSchedule(contents[1])
+    await planningPage.markDoneInToDos(contents[0])
+    await planningPage.markDoneInToDos(contents[1])
+    await secondPage.close()
+
+    for (const line of contents) await documentContentPage.checkToDo(line, true)
+  })
+
+  test('Table in the Document', async ({ page }) => {
+    await documentContentPage.inputContentParapraph().click()
+    await documentContentPage.leftMenu().click()
+    await documentContentPage.menuPopupItemButton('Table').click()
+    await documentContentPage.menuPopupItemButton('1x2').first().click()
+    await documentContentPage.proseTableCell(0, 0).fill('One')
+    await documentContentPage.proseTableCell(0, 1).fill('Two')
+    await documentContentPage.buttonInsertColumn().click()
+    await documentContentPage.proseTableCell(0, 1).fill('Three')
+
+    await documentContentPage.proseTableColumnHandle(1).hover()
+    await expect(async () => {
+      await page.mouse.down()
+      const boundingBox = await documentContentPage.proseTableCell(0, 1).boundingBox()
+      expect(boundingBox).toBeTruthy()
+      if (boundingBox != null) {
+        await page.mouse.move(boundingBox.x + boundingBox.width * 2, boundingBox.y - 5)
+        await page.mouse.move(boundingBox.x + boundingBox.width * 2 + 5, boundingBox.y - 5)
+        await page.mouse.up()
+      }
+    }).toPass(retryOptions)
+
+    await documentContentPage.buttonInsertLastRow().click()
+    await documentContentPage.proseTableCell(1, 1).fill('Bottom')
+    await documentContentPage.buttonInsertInnerRow().click()
+    await documentContentPage.proseTableCell(1, 1).fill('Middle')
+
+    await leftSideMenuSecondPage.clickDocuments()
+    await documentsSecondPage.openTeamspace(testDocument.space)
+    await documentsSecondPage.openDocument(testDocument.title)
+    await documentContentSecondPage.checkDocumentTitle(testDocument.title)
+    await expect(documentContentSecondPage.proseTableCell(1, 1)).toContainText('Middle')
+    await documentContentSecondPage.proseTableCell(1, 1).dblclick()
+    await documentContentSecondPage.proseTableCell(1, 1).fill('Center')
+    await expect(documentContentPage.proseTableCell(1, 1)).toContainText('Center', { timeout: 5000 })
await expect(documentContentPage.proseTableCell(1, 1)).toContainText('Center', { timeout: 5000 })
|
||||||
|
await secondPage.close()
|
||||||
|
})
|
||||||
|
})
|
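Note: the column-drag in the 'Table in the Document' test above relies on Playwright's expect(...).toPass(retryOptions) to re-run the whole mouse sequence until the drop registers. A minimal sketch of that retry pattern pulled out into a standalone helper; the dragHandleRight name, selector argument, and 40px offset are illustrative assumptions, not part of this change.

import { expect, type Page } from '@playwright/test'

// Same retry profile as the spec above: three increasing poll intervals, 60s cap.
const retryOptions = { intervals: [1000, 1500, 2500], timeout: 60000 }

// Hypothetical helper: press on a drag handle and move it a fixed offset to the
// right, retrying the entire sequence if the drop did not land.
async function dragHandleRight (page: Page, handleSelector: string, offset: number = 40): Promise<void> {
  await expect(async () => {
    // Re-measure on every attempt so a re-rendered handle is not dragged from stale coordinates.
    const box = await page.locator(handleSelector).boundingBox()
    expect(box).toBeTruthy()
    if (box != null) {
      await page.mouse.move(box.x + box.width / 2, box.y + box.height / 2)
      await page.mouse.down()
      await page.mouse.move(box.x + box.width / 2 + offset, box.y + box.height / 2)
      await page.mouse.up()
    }
  }).toPass(retryOptions)
}

Keeping the boundingBox lookup inside the retried callback is the point of the pattern: each attempt starts from a fresh measurement instead of reusing coordinates from a failed attempt.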
@@ -1,26 +1,9 @@
 import { test } from '@playwright/test'
-import {
-  generateId,
-  getSecondPage,
-  PlatformSetting,
-  PlatformURI,
-  getTimeForPlanner,
-  generateUser,
-  createAccountAndWorkspace,
-  createAccount,
-  getInviteLink,
-  generateTestData,
-  reLogin
-} from '../utils'
+import { generateId, getSecondPage, PlatformSetting, PlatformURI } from '../utils'
 import { NewDocument, NewTeamspace } from '../model/documents/types'
 import { LeftSideMenuPage } from '../model/left-side-menu-page'
 import { DocumentsPage } from '../model/documents/documents-page'
 import { DocumentContentPage } from '../model/documents/document-content-page'
-import { PlanningNavigationMenuPage } from '../model/planning/planning-navigation-menu-page'
-import { PlanningPage } from '../model/planning/planning-page'
-import { SignUpData } from '../model/common-types'
-import { SignInJoinPage } from '../model/signin-page'
-import { TestData } from '../chat/types'
 
 test.use({
   storageState: PlatformSetting
@@ -186,69 +169,4 @@ test.describe('Documents tests', () => {
     await documentContentPage.checkDocumentTitle(newDocument.title)
     await documentContentPage.checkDocumentLocked()
   })
-
-  test('ToDos in the Document', async ({ page, browser, request }) => {
-    const testData: TestData = generateTestData()
-    await createAccountAndWorkspace(page, request, testData)
-    const newUser2: SignUpData = generateUser()
-    await createAccount(request, newUser2)
-
-    const todosTeamspace: NewTeamspace = {
-      title: `ToDos Teamspace-${generateId()}`,
-      description: 'ToDos Teamspace description',
-      autoJoin: true
-    }
-    const todosDocument: NewDocument = {
-      title: `ToDos in the Document-${generateId()}`,
-      space: todosTeamspace.title
-    }
-    const contents: string[] = ['work', 'meet up']
-    let content: string = ''
-
-    const linkText = await getInviteLink(page)
-    await leftSideMenuPage.clickDocuments()
-    await documentsPage.checkTeamspaceNotExist(todosTeamspace.title)
-    await documentsPage.createNewTeamspace(todosTeamspace)
-    const page2 = await browser.newPage()
-    await page2.goto(linkText ?? '')
-    const joinPage: SignInJoinPage = new SignInJoinPage(page2)
-    await joinPage.join(newUser2)
-    await page2.goto(`${PlatformURI}/workbench/sanity-ws`)
-
-    await documentsPage.clickOnButtonCreateDocument()
-    await documentsPage.createDocument(todosDocument)
-    await documentsPage.openDocument(todosDocument.title)
-    await documentContentPage.checkDocumentTitle(todosDocument.title)
-    for (let i = 0; i < contents.length; i++) {
-      content = await documentContentPage.addContentToTheNewLine(`${i === 0 ? '[] ' : ''}${contents[i]}`)
-      await documentContentPage.checkContent(content)
-    }
-    for (const line of contents) {
-      await documentContentPage.assignToDo(`${newUser2.lastName} ${newUser2.firstName}`, line)
-    }
-
-    await reLogin(page2, { ...testData, userName: newUser2.email })
-    const leftSideMenuPageSecond = new LeftSideMenuPage(page2)
-    await leftSideMenuPageSecond.clickDocuments()
-    const documentsPageSecond = new DocumentsPage(page2)
-    await documentsPageSecond.openTeamspace(todosDocument.space)
-    await documentsPageSecond.openDocument(todosDocument.title)
-    const documentContentPageSecond = new DocumentContentPage(page2)
-    await documentContentPageSecond.checkDocumentTitle(todosDocument.title)
-    await documentContentPageSecond.checkContent(content)
-    await leftSideMenuPageSecond.clickPlanner()
-
-    const planningNavigationMenuPage = new PlanningNavigationMenuPage(page2)
-    await planningNavigationMenuPage.clickOnButtonToDoAll()
-    const planningPage = new PlanningPage(page2)
-    const time: string = getTimeForPlanner()
-    await planningPage.dragToCalendar(contents[0], 1, time)
-    await planningPage.dragToCalendar(contents[1], 1, time, true)
-    await planningPage.checkInSchedule(contents[0])
-    await planningPage.checkInSchedule(contents[1])
-    await planningPage.markDoneInToDos(contents[0])
-    await planningPage.markDoneInToDos(contents[1])
-    for (const line of contents) await documentContentPage.checkToDo(line, true)
-    await page2.close()
-  })
 })
@@ -11,6 +11,23 @@ export class DocumentContentPage extends CommonPage {
 
   readonly buttonDocumentTitle = (): Locator => this.page.locator('div[class*="main-content"] div.title input')
   readonly inputContent = (): Locator => this.page.locator('div.textInput div.tiptap')
+  readonly inputContentParapraph = (): Locator => this.page.locator('div.textInput div.tiptap > p')
+  readonly leftMenu = (): Locator => this.page.locator('div.tiptap-left-menu')
+  readonly proseTableCell = (row: number, col: number): Locator =>
+    this.page.locator('table.proseTable').locator('tr').nth(row).locator('td').nth(col).locator('p')
+
+  readonly proseTableColumnHandle = (col: number): Locator =>
+    this.page.locator('table.proseTable').locator('tr').first().locator('td').nth(col).locator('div.table-col-handle')
+
+  readonly buttonInsertColumn = (col: number = 0): Locator =>
+    this.page.locator('div.table-col-insert').nth(col).locator('button')
+
+  readonly buttonInsertLastRow = (): Locator =>
+    this.page.locator('table.proseTable + div.table-button-container__col + div.table-button-container__row')
+
+  readonly buttonInsertInnerRow = (row: number = 0): Locator =>
+    this.page.locator('table.proseTable').locator('tr').nth(row).locator('div.table-row-insert button')
+
   readonly buttonToolbarLink = (): Locator => this.page.locator('div.text-editor-toolbar button[data-id="btnLink"]')
   readonly inputFormLink = (): Locator => this.page.locator('form[id="text-editor:string:Link"] input')
   readonly buttonFormLinkSave = (): Locator =>
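Note: the new locators above always reach table.proseTable cells through tr/td/p. A hypothetical companion helper built on those same selectors, not part of this change, that snapshots the whole grid for assertions on more than one cell at a time; readProseTable is an assumed name.

import { type Page } from '@playwright/test'

// Hypothetical helper: read the ProseMirror table into a plain string[][].
export async function readProseTable (page: Page): Promise<string[][]> {
  const rows = page.locator('table.proseTable tr')
  const table: string[][] = []
  const rowCount = await rows.count()
  for (let r = 0; r < rowCount; r++) {
    const cells = rows.nth(r).locator('td p')
    const row: string[] = []
    const cellCount = await cells.count()
    for (let c = 0; c < cellCount; c++) {
      // innerText() reflects what the user sees, including collaborative edits applied remotely.
      row.push((await cells.nth(c).innerText()).trim())
    }
    table.push(row)
  }
  return table
}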
@@ -8,6 +8,7 @@ import { SignUpData } from './model/common-types'
 import { ApiEndpoint } from './API/Api'
 import { SelectWorkspacePage } from './model/select-workspace-page'
 import { LoginPage } from './model/login-page'
+import { SignInJoinPage } from './model/signin-page'
 
 export const PlatformURI = process.env.PLATFORM_URI as string
 export const PlatformTransactor = process.env.PLATFORM_TRANSACTOR as string
@@ -109,6 +110,19 @@ export async function getSecondPage (browser: Browser): Promise<{ page: Page, co
   const userSecondContext = await browser.newContext({ storageState: PlatformSettingSecond })
   return { page: await userSecondContext.newPage(), context: userSecondContext }
 }
+
+export async function getSecondPageByInvite (
+  browser: Browser,
+  linkText: string | null,
+  newUser: SignUpData
+): Promise<Page> {
+  const page = await browser.newPage()
+  await page.goto(linkText ?? '')
+  const joinPage: SignInJoinPage = new SignInJoinPage(page)
+  await joinPage.join(newUser)
+  return page
+}
+
 export function expectToContainsOrdered (val: Locator, text: string[], timeout?: number): Promise<void> {
   const origIssuesExp = new RegExp('.*' + text.join('.*') + '.*')
   return expect(val).toHaveText(origIssuesExp, { timeout })
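Note: getSecondPageByInvite returns only the Page, so the specs above clean up with secondPage.close(). A hypothetical variant, not part of this change, that mirrors getSecondPage by also returning the BrowserContext so callers can dispose the whole context; getSecondContextByInvite is an assumed name.

import { type Browser, type BrowserContext, type Page } from '@playwright/test'
import { SignUpData } from './model/common-types'
import { SignInJoinPage } from './model/signin-page'

// Hypothetical variant: join via invite link in a dedicated context and hand both back.
export async function getSecondContextByInvite (
  browser: Browser,
  linkText: string | null,
  newUser: SignUpData
): Promise<{ page: Page, context: BrowserContext }> {
  const context = await browser.newContext()
  const page = await context.newPage()
  await page.goto(linkText ?? '')
  await new SignInJoinPage(page).join(newUser)
  return { page, context }
}

Returning the context lets a test call context.close() in afterEach, which also drops cookies and storage created during the join flow.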