From 3116af99ba11cac24a046364f93bdedb76b07f0f Mon Sep 17 00:00:00 2001
From: Andrey Sobolev
Date: Tue, 4 Mar 2025 15:05:59 +0700
Subject: [PATCH] UBERF-9543: Restore-all tool (#8132)

Signed-off-by: Andrey Sobolev
---
 .vscode/launch.json                     | 20 +++++++
 dev/tool/src/index.ts                   | 74 +++++++++++++++++++++----
 dev/tool/src/workspace.ts               | 67 +++++++++++++++++++++-
 server/backup/src/service.ts            |  2 +-
 server/workspace-service/src/index.ts   |  4 ++
 server/workspace-service/src/service.ts |  8 +--
 6 files changed, 156 insertions(+), 19 deletions(-)

diff --git a/.vscode/launch.json b/.vscode/launch.json
index e4d2215fa1..ead314dae7 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -536,6 +536,26 @@
       "cwd": "${workspaceRoot}/dev/tool",
       "protocol": "inspector"
     },
+    {
+      "name": "Debug restore-all tool",
+      "type": "node",
+      "request": "launch",
+      "args": ["src/__start.ts", "restore-all"],
+      "env": {
+        "ACCOUNTS_URL": "http://localhost:3000",
+        "STORAGE": "minio|localhost?accessKey=minioadmin&secretKey=minioadmin",
+        // "DB_URL": "mongodb://localhost:27017",
+        "DB_URL": "postgresql://root@host.docker.internal:26258/defaultdb?sslmode=disable",
+        "SERVER_SECRET": "secret",
+        "BUCKET_NAME": "dev-backups"
+      },
+      "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
+      "showAsyncStacks": true,
+      "sourceMaps": true,
+      "outputCapture": "std",
+      "cwd": "${workspaceRoot}/dev/tool",
+      "protocol": "inspector"
+    },
     {
       "name": "Debug Github integration",
       "type": "node",
diff --git a/dev/tool/src/index.ts b/dev/tool/src/index.ts
index 3966a8c92d..9bd8b9c4db 100644
--- a/dev/tool/src/index.ts
+++ b/dev/tool/src/index.ts
@@ -619,19 +619,69 @@ export function devTool (
   // const destroyer = getWorkspaceDestroyAdapter(dbUrl)
-  // await destroyer.deleteWorkspace(toolCtx, sharedPipelineContextVars, { name: ws.workspace })
-  // }
-  // }
-  // )
-  // } catch (err: any) {
-  // toolCtx.error('Failed to backup/archive workspace', { workspace: ws.workspace })
-  // }
-  // }
-  // }
-  // console.log('Processed unused workspaces', unused)
-  // })
+  // program
+  //   .command('restore-all')
+  //   .description('Restore workspaces to selected region DB...')
+  //   .option('-t|--timeout [timeout]', 'Timeout in days', '60')
+  //   .option('-r|--region [region]', 'Region to restore', '')
+  //   .option('-w|--workspace [workspace]', 'Force restore of selected workspace', '')
+  //   .option('-d|--dry [dry]', 'Dry run', false)
+  //   .action(async (cmd: { timeout: string, workspace: string, region: string, dry: boolean, account: string }) => {
+  //     const { txes, dbUrl } = prepareTools()
+
+  //     const bucketName = process.env.BUCKET_NAME
+  //     if (bucketName === '' || bucketName == null) {
+  //       console.error('please provide bucket name env')
+  //       process.exit(1)
+  //     }
+
+  //     const token = generateToken(systemAccountEmail, getWorkspaceId(''))
+  //     const workspaces = (await listAccountWorkspaces(token, cmd.region))
+  //       .sort((a, b) => {
+  //         const bsize = b.backupInfo?.backupSize ?? 0
+  //         const asize = a.backupInfo?.backupSize ?? 0
+  //         return bsize - asize
+  //       })
+  //       .filter((it) => cmd.workspace === '' || cmd.workspace === it.workspace)
+
+  //     for (const ws of workspaces) {
+  //       const lastVisitDays = Math.floor((Date.now() - ws.lastVisit) / 1000 / 3600 / 24)
+
+  //       toolCtx.warn('--- restoring workspace', {
+  //         url: ws.workspaceUrl,
+  //         id: ws.workspace,
+  //         lastVisitDays,
+  //         backupSize: ws.backupInfo?.blobsSize ?? 0,
+  //         mode: ws.mode
+  //       })
+  //       if (cmd.dry) {
+  //         continue
+  //       }
+  //       try {
+  //         const st = Date.now()
+  //         await backupRestore(
+  //           toolCtx,
+  //           dbUrl,
+  //           bucketName,
+  //           ws,
+  //           (dbUrl, storageAdapter) => {
+  //             const factory: PipelineFactory = createBackupPipeline(toolCtx, dbUrl, txes, {
+  //               externalStorage: storageAdapter,
+  //               usePassedCtx: true
+  //             })
+  //             return factory
+  //           },
+  //           [DOMAIN_BLOB]
+  //         )
+  //         const ed = Date.now()
+  //         toolCtx.warn('--- restoring complete', {
+  //           time: ed - st
+  //         })
+  //       } catch (err: any) {
+  //         toolCtx.error('Restore of workspace failed', { workspace: ws.workspace })
+  //       }
+  //     }
+  //   })
   // program
   //   .command('backup-all')
diff --git a/dev/tool/src/workspace.ts b/dev/tool/src/workspace.ts
index e790dbc1fe..89737e8c96 100644
--- a/dev/tool/src/workspace.ts
+++ b/dev/tool/src/workspace.ts
@@ -15,16 +15,27 @@
 //
 import core, {
+  DOMAIN_TX,
+  getWorkspaceId,
   type BackupClient,
+  type BaseWorkspaceInfo,
   type Class,
   type Client as CoreClient,
   type Doc,
-  DOMAIN_TX,
+  type MeasureContext,
   type Ref,
   type Tx,
   type WorkspaceUuid
 } from '@hcengineering/core'
 import { getMongoClient, getWorkspaceMongoDB } from '@hcengineering/mongo'
+import { createStorageBackupStorage, restore } from '@hcengineering/server-backup'
+import {
+  createDummyStorageAdapter,
+  wrapPipeline,
+  type PipelineFactory,
+  type StorageAdapter
+} from '@hcengineering/server-core'
+import { createStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
 import { connect } from '@hcengineering/server-tool'

 import { generateModelDiff, printDiff } from './mdiff'
@@ -104,3 +115,57 @@ export async function updateField (
     await connection.close()
   }
 }
+
+export async function backupRestore (
+  ctx: MeasureContext,
+  dbURL: string,
+  bucketName: string,
+  workspace: BaseWorkspaceInfo,
+  pipelineFactoryFactory: (mongoUrl: string, storage: StorageAdapter) => PipelineFactory,
+  skipDomains: string[]
+): Promise<boolean> {
+  const storageEnv = process.env.STORAGE
+  if (storageEnv === undefined) {
+    console.error('please provide STORAGE env')
+    process.exit(1)
+  }
+  if (bucketName.trim() === '') {
+    console.error('please provide bucket name env')
+    process.exit(1)
+  }
+  const backupStorageConfig = storageConfigFromEnv(storageEnv)
+
+  const storageAdapter = createStorageFromConfig(backupStorageConfig.storages[0])
+
+  const workspaceStorage = createDummyStorageAdapter()
+  const pipelineFactory = pipelineFactoryFactory(dbURL, workspaceStorage)
+
+  try {
+    const storage = await createStorageBackupStorage(
+      ctx,
+      storageAdapter,
+      getWorkspaceId(bucketName),
+      workspace.workspace
+    )
+    const wsUrl: WorkspaceIdWithUrl = {
+      name: workspace.workspace,
+      uuid: workspace.uuid,
+      workspaceName: workspace.workspaceName ?? '',
+      workspaceUrl: workspace.workspaceUrl ?? ''
+    }
+    const result: boolean = await ctx.with('restore', { workspace: workspace.workspace }, (ctx) =>
+      restore(ctx, '', getWorkspaceId(workspace.workspace), storage, {
+        date: -1,
+        skip: new Set(skipDomains),
+        recheck: false,
+        storageAdapter: workspaceStorage,
+        getConnection: async () => {
+          return wrapPipeline(ctx, await pipelineFactory(ctx, wsUrl, true, () => {}, null), wsUrl)
+        }
+      })
+    )
+    return result
+  } finally {
+    await storageAdapter.close()
+  }
+}
diff --git a/server/backup/src/service.ts b/server/backup/src/service.ts
index c2818832ff..2025d50ad3 100644
--- a/server/backup/src/service.ts
+++ b/server/backup/src/service.ts
@@ -449,7 +449,7 @@ export async function doRestoreWorkspace (
     restore(ctx, '', wsIds, storage, {
       date: -1,
       skip: new Set(skipDomains),
-      recheck: true,
+      recheck: false, // Do not need to recheck
       storageAdapter: workspaceStorageAdapter,
       cleanIndexState,
       getConnection: async () => {
diff --git a/server/workspace-service/src/index.ts b/server/workspace-service/src/index.ts
index 6e55803b54..d54309e2da 100644
--- a/server/workspace-service/src/index.ts
+++ b/server/workspace-service/src/index.ts
@@ -63,6 +63,10 @@ export function serveWorkspaceAccount (
     process.exit(1)
   }

+  if (process.env.MIGRATION_CLEANUP !== 'true') {
+    console.log('Migration cleanup is not set, so moving to regions will not clean the old DB.')
+  }
+
   const backup =
     wsOperation === 'all+backup'
       ? {
diff --git a/server/workspace-service/src/service.ts b/server/workspace-service/src/service.ts
index de1ceec1b0..b47fb0ac86 100644
--- a/server/workspace-service/src/service.ts
+++ b/server/workspace-service/src/service.ts
@@ -20,6 +20,7 @@ import {
   isRestoringMode,
   systemAccountUuid,
   type BrandingMap,
+  DOMAIN_BLOB,
   type Data,
   type MeasureContext,
   type Tx,
@@ -68,8 +69,6 @@
 import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
 import { createWorkspace, upgradeWorkspace } from './ws-operations'

-const dbCleanTreshold = 256 // Cleanup workspaces if less 256mb
-
 export interface WorkspaceOptions {
   errorHandler: (workspace: WorkspaceInfoWithStatus, error: any) => Promise<void>
   force: boolean
@@ -527,8 +526,7 @@
       await sendEvent('migrate-clean-started', 0)
       await this.sendTransactorMaitenance(token, workspace.uuid)
-      const sz = workspace.backupInfo?.backupSize ?? 0
-      if (sz <= dbCleanTreshold) {
+      if (process.env.MIGRATION_CLEANUP === 'true') {
         try {
           await this.doCleanup(ctx, workspace, false)
         } catch (err: any) {
@@ -701,7 +699,7 @@
             opt.backup.bucketName,
             pipelineFactory,
             workspaceStorageAdapter,
-            ['blob'],
+            [DOMAIN_BLOB],
             true,
             (_p: number) => {
               if (progress !== Math.round(_p)) {
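
Usage note (editor's sketch, not part of the original patch): the "Debug restore-all tool" launch configuration above encodes how the tool is expected to be started. Once the commented-out restore-all block in dev/tool/src/index.ts is enabled, a roughly equivalent command line, run from the dev/tool directory with the same development defaults, would presumably be:

  ACCOUNTS_URL=http://localhost:3000 \
  STORAGE='minio|localhost?accessKey=minioadmin&secretKey=minioadmin' \
  DB_URL='postgresql://root@host.docker.internal:26258/defaultdb?sslmode=disable' \
  SERVER_SECRET=secret \
  BUCKET_NAME=dev-backups \
  node --nolazy -r ts-node/register src/__start.ts restore-all --workspace my-workspace --dry

The environment values and node arguments are taken verbatim from the launch configuration; my-workspace is a hypothetical placeholder, and the --workspace, --region and --dry flags come from the commented-out command definition (--dry only logs the workspaces that would be restored and skips the actual restore).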