UBERF-9543: Restore-all tool (#8132)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Andrey Sobolev 2025-03-04 15:05:59 +07:00 committed by Andrey Sobolev
parent 805bf254c9
commit 3116af99ba
GPG Key ID: BD80F68D68D8F7F2 (no known key found for this signature in database)
6 changed files with 156 additions and 19 deletions

.vscode/launch.json

@@ -536,6 +536,26 @@
       "cwd": "${workspaceRoot}/dev/tool",
       "protocol": "inspector"
     },
+    {
+      "name": "Debug restore-all tool",
+      "type": "node",
+      "request": "launch",
+      "args": ["src/__start.ts", "restore-all"],
+      "env": {
+        "ACCOUNTS_URL": "http://localhost:3000",
+        "STORAGE": "minio|localhost?accessKey=minioadmin&secretKey=minioadmin",
+        // "DB_URL": "mongodb://localhost:27017",
+        "DB_URL": "postgresql://root@host.docker.internal:26258/defaultdb?sslmode=disable",
+        "SERVER_SECRET": "secret",
+        "BUCKET_NAME": "dev-backups"
+      },
+      "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
+      "showAsyncStacks": true,
+      "sourceMaps": true,
+      "outputCapture": "std",
+      "cwd": "${workspaceRoot}/dev/tool",
+      "protocol": "inspector"
+    },
     {
       "name": "Debug Github integration",
       "type": "node",


@@ -619,19 +619,69 @@ export function devTool (
   // const destroyer = getWorkspaceDestroyAdapter(dbUrl)
-  // await destroyer.deleteWorkspace(toolCtx, sharedPipelineContextVars, { name: ws.workspace })
-  // }
-  // }
-  // )
-  // } catch (err: any) {
-  // toolCtx.error('Failed to backup/archive workspace', { workspace: ws.workspace })
-  // }
-  // }
-  // }
-  // console.log('Processed unused workspaces', unused)
-  // })
-  // }
-  // )
+  // program
+  //   .command('restore-all')
+  //   .description('Restore workspaces to selected region DB...')
+  //   .option('-t|--timeout [timeout]', 'Timeout in days', '60')
+  //   .option('-r|--region [region]', 'Region to restore to', '')
+  //   .option('-w|--workspace [workspace]', 'Force restore of selected workspace', '')
+  //   .option('-d|--dry [dry]', 'Dry run', false)
+  //   .action(async (cmd: { timeout: string, workspace: string, region: string, dry: boolean, account: string }) => {
+  //     const { txes, dbUrl } = prepareTools()
+  //     const bucketName = process.env.BUCKET_NAME
+  //     if (bucketName === '' || bucketName == null) {
+  //       console.error('please provide bucket name env')
+  //       process.exit(1)
+  //     }
+  //     const token = generateToken(systemAccountEmail, getWorkspaceId(''))
+  //     const workspaces = (await listAccountWorkspaces(token, cmd.region))
+  //       .sort((a, b) => {
+  //         const bsize = b.backupInfo?.backupSize ?? 0
+  //         const asize = a.backupInfo?.backupSize ?? 0
+  //         return bsize - asize
+  //       })
+  //       .filter((it) => cmd.workspace === '' || cmd.workspace === it.workspace)
+  //     for (const ws of workspaces) {
+  //       const lastVisitDays = Math.floor((Date.now() - ws.lastVisit) / 1000 / 3600 / 24)
+  //       toolCtx.warn('--- restoring workspace', {
+  //         url: ws.workspaceUrl,
+  //         id: ws.workspace,
+  //         lastVisitDays,
+  //         backupSize: ws.backupInfo?.blobsSize ?? 0,
+  //         mode: ws.mode
+  //       })
+  //       if (cmd.dry) {
+  //         continue
+  //       }
+  //       try {
+  //         const st = Date.now()
+  //         await backupRestore(
+  //           toolCtx,
+  //           dbUrl,
+  //           bucketName,
+  //           ws,
+  //           (dbUrl, storageAdapter) => {
+  //             const factory: PipelineFactory = createBackupPipeline(toolCtx, dbUrl, txes, {
+  //               externalStorage: storageAdapter,
+  //               usePassedCtx: true
+  //             })
+  //             return factory
+  //           },
+  //           [DOMAIN_BLOB]
+  //         )
+  //         const ed = Date.now()
+  //         toolCtx.warn('--- restore complete', {
+  //           time: ed - st
+  //         })
+  //       } catch (err: any) {
+  //         toolCtx.error('Restore of workspace failed', { workspace: ws.workspace })
+  //       }
+  //     }
+  //   })
   // program
   //   .command('backup-all')
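
Editor's note on the commented-out command above: the comparator `bsize - asize` with `b` first orders workspaces by descending backup size, so the largest backups restore first; `-w` narrows the run to one workspace and `-d` previews the selection without restoring. The same selection logic as a self-contained sketch (WorkspaceLike is a minimal stand-in for BaseWorkspaceInfo):

interface WorkspaceLike {
  workspace: string
  backupInfo?: { backupSize: number }
}

// Largest backups first, optionally narrowed to a single workspace.
function selectWorkspaces (workspaces: WorkspaceLike[], onlyWorkspace: string): WorkspaceLike[] {
  return workspaces
    .sort((a, b) => (b.backupInfo?.backupSize ?? 0) - (a.backupInfo?.backupSize ?? 0))
    .filter((it) => onlyWorkspace === '' || onlyWorkspace === it.workspace)
}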


@@ -15,16 +15,27 @@
 //
 import core, {
+  DOMAIN_TX,
+  getWorkspaceId,
   type BackupClient,
+  type BaseWorkspaceInfo,
   type Class,
   type Client as CoreClient,
   type Doc,
-  DOMAIN_TX,
+  type MeasureContext,
   type Ref,
   type Tx,
   type WorkspaceUuid
 } from '@hcengineering/core'
 import { getMongoClient, getWorkspaceMongoDB } from '@hcengineering/mongo'
+import { createStorageBackupStorage, restore } from '@hcengineering/server-backup'
+import {
+  createDummyStorageAdapter,
+  wrapPipeline,
+  type PipelineFactory,
+  type StorageAdapter
+} from '@hcengineering/server-core'
+import { createStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
 import { connect } from '@hcengineering/server-tool'
 import { generateModelDiff, printDiff } from './mdiff'
@@ -104,3 +115,57 @@ export async function updateField (
     await connection.close()
   }
 }
+
+export async function backupRestore (
+  ctx: MeasureContext,
+  dbURL: string,
+  bucketName: string,
+  workspace: BaseWorkspaceInfo,
+  pipelineFactoryFactory: (mongoUrl: string, storage: StorageAdapter) => PipelineFactory,
+  skipDomains: string[]
+): Promise<boolean> {
+  const storageEnv = process.env.STORAGE
+  if (storageEnv === undefined) {
+    console.error('please provide STORAGE env')
+    process.exit(1)
+  }
+  if (bucketName.trim() === '') {
+    console.error('please provide bucket name env')
+    process.exit(1)
+  }
+  const backupStorageConfig = storageConfigFromEnv(storageEnv)
+  const storageAdapter = createStorageFromConfig(backupStorageConfig.storages[0])
+  const workspaceStorage = createDummyStorageAdapter()
+  const pipelineFactory = pipelineFactoryFactory(dbURL, workspaceStorage)
+  try {
+    const storage = await createStorageBackupStorage(
+      ctx,
+      storageAdapter,
+      getWorkspaceId(bucketName),
+      workspace.workspace
+    )
+    const wsUrl: WorkspaceIdWithUrl = {
+      name: workspace.workspace,
+      uuid: workspace.uuid,
+      workspaceName: workspace.workspaceName ?? '',
+      workspaceUrl: workspace.workspaceUrl ?? ''
+    }
+    const result: boolean = await ctx.with('restore', { workspace: workspace.workspace }, (ctx) =>
+      restore(ctx, '', getWorkspaceId(workspace.workspace), storage, {
+        date: -1,
+        skip: new Set(skipDomains),
+        recheck: false,
+        storageAdapter: workspaceStorage,
+        getConnection: async () => {
+          return wrapPipeline(ctx, await pipelineFactory(ctx, wsUrl, true, () => {}, null), wsUrl)
+        }
+      })
+    )
+    return result
+  } finally {
+    await storageAdapter.close()
+  }
+}
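
Editor's note: a hedged usage sketch for backupRestore, mirroring the commented-out restore-all command earlier in this commit. It assumes toolCtx, prepareTools, and createBackupPipeline are available as elsewhere in dev/tool; ws is a BaseWorkspaceInfo (e.g. from listAccountWorkspaces), and the bucket fallback is illustrative:

import { DOMAIN_BLOB } from '@hcengineering/core'

// Restore a single workspace from its backup, skipping the blob domain:
// blobs stay in blob storage, only database domains are written back.
const { txes, dbUrl } = prepareTools()
const ok = await backupRestore(
  toolCtx,
  dbUrl,
  process.env.BUCKET_NAME ?? 'dev-backups',
  ws,
  (dbUrl, storageAdapter) =>
    createBackupPipeline(toolCtx, dbUrl, txes, {
      externalStorage: storageAdapter,
      usePassedCtx: true
    }),
  [DOMAIN_BLOB]
)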


@@ -449,7 +449,7 @@ export async function doRestoreWorkspace (
     restore(ctx, '', wsIds, storage, {
       date: -1,
       skip: new Set(skipDomains),
-      recheck: true,
+      recheck: false, // Do not need to recheck
       storageAdapter: workspaceStorageAdapter,
       cleanIndexState,
       getConnection: async () => {
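
Editor's note: recheck: true made the restore re-verify documents already present in the target; presumably the restore now writes into a fresh region database, so there is nothing to re-verify (per the inline comment). A sketch of the options object shared by both restore call sites in this commit — field meanings are inferred from usage, the authoritative type lives in @hcengineering/server-backup:

import type { BackupClient, Client as CoreClient } from '@hcengineering/core'
import type { StorageAdapter } from '@hcengineering/server-core'

// Sketch only; not the library's declared type.
interface RestoreOptionsSketch {
  date: number // -1 selects the most recent backup snapshot
  skip: Set<string> // domains left untouched, e.g. DOMAIN_BLOB
  recheck: boolean // re-verify documents already in the target; disabled here
  storageAdapter: StorageAdapter // blob storage for restored files
  getConnection: () => Promise<CoreClient & BackupClient> // target workspace connection
}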


@@ -63,6 +63,10 @@ export function serveWorkspaceAccount (
     process.exit(1)
   }
+
+  if (process.env.MIGRATION_CLEANUP !== 'true') {
+    console.log('Migration cleanup is not set, so move to regions will not clean old DB.')
+  }
   const backup =
     wsOperation === 'all+backup'
       ? {
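
Editor's note: taken together with the WorkspaceWorker change below, cleaning up the old database after a region move is now an explicit opt-in. Previously cleanup ran whenever the workspace's backup size was under the 256 MB dbCleanTreshold; now it runs only when MIGRATION_CLEANUP=true, and this startup log makes the default visible. A sketch of the combined behavior:

// The size threshold is gone; old-DB cleanup during a region move is
// gated purely by the env flag.
const cleanupEnabled = process.env.MIGRATION_CLEANUP === 'true'
if (!cleanupEnabled) {
  console.log('Migration cleanup is not set, so move to regions will not clean old DB.')
}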


@@ -20,6 +20,7 @@ import {
   isRestoringMode,
   systemAccountUuid,
   type BrandingMap,
+  DOMAIN_BLOB,
   type Data,
   type MeasureContext,
   type Tx,
@@ -68,8 +69,6 @@ import {
 import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
 import { createWorkspace, upgradeWorkspace } from './ws-operations'
 
-const dbCleanTreshold = 256 // Cleanup workspaces if less 256mb
-
 export interface WorkspaceOptions {
   errorHandler: (workspace: WorkspaceInfoWithStatus, error: any) => Promise<void>
   force: boolean
@@ -527,8 +526,7 @@ export class WorkspaceWorker {
       await sendEvent('migrate-clean-started', 0)
       await this.sendTransactorMaitenance(token, workspace.uuid)
-      const sz = workspace.backupInfo?.backupSize ?? 0
-      if (sz <= dbCleanTreshold) {
+      if (process.env.MIGRATION_CLEANUP === 'true') {
         try {
           await this.doCleanup(ctx, workspace, false)
         } catch (err: any) {
@@ -701,7 +699,7 @@
         opt.backup.bucketName,
         pipelineFactory,
         workspaceStorageAdapter,
-        ['blob'],
+        [DOMAIN_BLOB],
         true,
         (_p: number) => {
           if (progress !== Math.round(_p)) {
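
Editor's note: replacing the string literal 'blob' with the DOMAIN_BLOB constant keeps the skip list aligned with the domain name defined in @hcengineering/core. For instance:

import { DOMAIN_BLOB } from '@hcengineering/core'

// Prefer the shared constant over a string literal: a misspelled 'blob'
// would still type-check as string[] but match no domain, silently
// disabling the skip.
const skipDomains: string[] = [DOMAIN_BLOB] // was: ['blob']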