Fix backup all and disable full check for migration

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
Andrey Sobolev 2025-03-05 14:19:03 +07:00
parent 059cab66fb
commit 622844c42f
No known key found for this signature in database
GPG Key ID: BD80F68D68D8F7F2
4 changed files with 80 additions and 56 deletions

View File

@ -25,7 +25,15 @@ import accountPlugin, {
type AccountDB
} from '@hcengineering/account'
import { setMetadata } from '@hcengineering/platform'
import { backup, createFileBackupStorage, createStorageBackupStorage, restore } from '@hcengineering/server-backup'
import {
backup,
backupFind,
checkBackupIntegrity,
compactBackup,
createFileBackupStorage,
createStorageBackupStorage,
restore
} from '@hcengineering/server-backup'
import serverClientPlugin, { getAccountClient } from '@hcengineering/server-client'
import {
registerAdapterFactory,
@ -36,7 +44,7 @@ import {
setAdapterSecurity,
sharedPipelineContextVars
} from '@hcengineering/server-pipeline'
import serverToken from '@hcengineering/server-token'
import serverToken, { generateToken } from '@hcengineering/server-token'
import { createWorkspace, upgradeWorkspace } from '@hcengineering/workspace-service'
import { buildStorageFromConfig, createStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
@ -47,12 +55,15 @@ import {
AccountRole,
MeasureMetricsContext,
metricsToString,
type PersonId,
type WorkspaceUuid,
systemAccountUuid,
type Data,
type Doc,
type PersonId,
type Ref,
type Tx,
type Version,
type WorkspaceDataId
type WorkspaceDataId,
type WorkspaceUuid
} from '@hcengineering/core'
import { consoleModelLogger, type MigrateOperation } from '@hcengineering/model'
import {
@ -76,8 +87,8 @@ import { getAccountDBUrl, getMongoDBUrl } from './__start'
import { changeConfiguration } from './configuration'
import { reindexWorkspace } from './fulltext'
import { getToolToken, getWorkspace, getWorkspaceTransactorEndpoint } from './utils'
import { moveAccountDbFromMongoToPG } from './db'
import { getToolToken, getWorkspace, getWorkspaceTransactorEndpoint } from './utils'
const colorConstants = {
colorRed: '\u001b[31m',
@ -958,30 +969,30 @@ export function devTool (
})
}
)
// program
// .command('backup-find <dirName> <fileId>')
// .description('dump workspace transactions and minio resources')
// .option('-d, --domain <domain>', 'Check only domain')
// .action(async (dirName: string, fileId: string, cmd: { domain: string | undefined }) => {
// const storage = await createFileBackupStorage(dirName)
// await backupFind(storage, fileId as unknown as Ref<Doc>, cmd.domain)
// })
program
.command('backup-find <dirName> <fileId>')
.description('dump workspace transactions and minio resources')
.option('-d, --domain <domain>', 'Check only domain')
.action(async (dirName: string, fileId: string, cmd: { domain: string | undefined }) => {
const storage = await createFileBackupStorage(dirName)
await backupFind(storage, fileId as unknown as Ref<Doc>, cmd.domain)
})
// program
// .command('backup-compact <dirName>')
// .description('Compact a given backup, will create one snapshot clean unused resources')
// .option('-f, --force', 'Force compact.', false)
// .action(async (dirName: string, cmd: { force: boolean }) => {
// const storage = await createFileBackupStorage(dirName)
// await compactBackup(toolCtx, storage, cmd.force)
// })
// program
// .command('backup-check <dirName>')
// .description('Compact a given backup, will create one snapshot clean unused resources')
// .action(async (dirName: string, cmd: any) => {
// const storage = await createFileBackupStorage(dirName)
// await checkBackupIntegrity(toolCtx, storage)
// })
program
.command('backup-compact <dirName>')
.description('Compact a given backup; will create one snapshot and clean unused resources')
.option('-f, --force', 'Force compact.', false)
.action(async (dirName: string, cmd: { force: boolean }) => {
const storage = await createFileBackupStorage(dirName)
await compactBackup(toolCtx, storage, cmd.force)
})
program
.command('backup-check <dirName>')
.description('Check the integrity of a given backup')
.action(async (dirName: string, cmd: any) => {
const storage = await createFileBackupStorage(dirName)
await checkBackupIntegrity(toolCtx, storage)
})
program
.command('backup-check-all')
@ -999,23 +1010,23 @@ export function devTool (
const skipWorkspaces = new Set(cmd.skip.split(',').map((it) => it.trim()))
const token = generateToken(systemAccountEmail, getWorkspaceId(''))
const workspaces = (await listAccountWorkspaces(token, cmd.region))
const token = generateToken(systemAccountUuid, '' as WorkspaceUuid)
const workspaces = (await getAccountClient(token).listWorkspaces(cmd.region))
.sort((a, b) => {
const bsize = b.backupInfo?.backupSize ?? 0
const asize = a.backupInfo?.backupSize ?? 0
return bsize - asize
})
.filter((it) => (cmd.workspace === '' || cmd.workspace === it.workspace) && !skipWorkspaces.has(it.workspace))
.filter((it) => (cmd.workspace === '' || cmd.workspace === it.url) && !skipWorkspaces.has(it.url))
const backupStorageConfig = storageConfigFromEnv(process.env.STORAGE)
const storageAdapter = createStorageFromConfig(backupStorageConfig.storages[0])
for (const ws of workspaces) {
const lastVisitDays = Math.floor((Date.now() - ws.lastVisit) / 1000 / 3600 / 24)
const lastVisitDays = Math.floor((Date.now() - (ws.lastVisit ?? 0)) / 1000 / 3600 / 24)
toolCtx.warn('--- checking workspace backup', {
url: ws.workspaceUrl,
id: ws.workspace,
url: ws.url,
id: ws.uuid,
lastVisitDays,
backupSize: ws.backupInfo?.blobsSize ?? 0,
mode: ws.mode
@ -1030,8 +1041,12 @@ export function devTool (
const storage = await createStorageBackupStorage(
toolCtx,
storageAdapter,
getWorkspaceId(bucketName),
ws.workspace
{
uuid: 'backup' as WorkspaceUuid,
url: 'backup',
dataId: bucketName as WorkspaceDataId
},
ws.dataId ?? ws.uuid
)
await checkBackupIntegrity(toolCtx, storage)
} catch (err: any) {
@ -1042,7 +1057,7 @@ export function devTool (
time: ed - st
})
} catch (err: any) {
toolCtx.error('REstore of f workspace failedarchive workspace', { workspace: ws.workspace })
toolCtx.error('Restore of workspace failed', { workspace: ws.url })
}
}
await storageAdapter.close()

View File

@ -16,15 +16,16 @@
import core, {
DOMAIN_TX,
getWorkspaceId,
type BackupClient,
type BaseWorkspaceInfo,
type Class,
type Client as CoreClient,
type Doc,
type MeasureContext,
type Ref,
type Tx,
type WorkspaceDataId,
type WorkspaceIds,
type WorkspaceInfoWithStatus,
type WorkspaceUuid
} from '@hcengineering/core'
import { getMongoClient, getWorkspaceMongoDB } from '@hcengineering/mongo'
@ -120,7 +121,7 @@ export async function backupRestore (
ctx: MeasureContext,
dbURL: string,
bucketName: string,
workspace: BaseWorkspaceInfo,
workspace: WorkspaceInfoWithStatus,
pipelineFactoryFactory: (mongoUrl: string, storage: StorageAdapter) => PipelineFactory,
skipDomains: string[]
): Promise<boolean> {
@ -144,17 +145,20 @@ export async function backupRestore (
const storage = await createStorageBackupStorage(
ctx,
storageAdapter,
getWorkspaceId(bucketName),
workspace.workspace
{
uuid: 'backup' as WorkspaceUuid,
url: bucketName,
dataId: bucketName as WorkspaceDataId
},
workspace.dataId ?? workspace.uuid
)
const wsUrl: WorkspaceIdWithUrl = {
name: workspace.workspace,
const wsUrl: WorkspaceIds = {
uuid: workspace.uuid,
workspaceName: workspace.workspaceName ?? '',
workspaceUrl: workspace.workspaceUrl ?? ''
dataId: workspace.dataId,
url: workspace.url
}
const result: boolean = await ctx.with('restore', { workspace: workspace.workspace }, (ctx) =>
restore(ctx, '', getWorkspaceId(workspace.workspace), storage, {
const result: boolean = await ctx.with('restore', { workspace: workspace.url }, (ctx) =>
restore(ctx, '', wsUrl, storage, {
date: -1,
skip: new Set(skipDomains),
recheck: false,

View File

@ -2062,7 +2062,7 @@ export async function restore (
if (docsToAdd.size === 0) {
break
}
ctx.info('processing', { storageFile: sf, processed, workspace: workspaceId.name })
ctx.info('processing', { storageFile: sf, processed, workspace: wsIds.url })
try {
const readStream = await storage.load(sf)
const ex = extract()
@ -2107,7 +2107,7 @@ export async function restore (
try {
doc = JSON.parse(bf.toString()) as Doc
} catch (err) {
ctx.warn('failed to parse blob metadata', { name, workspace: workspaceId.name, err })
ctx.warn('failed to parse blob metadata', { name, workspace: wsIds.url, err })
next()
return
}
@ -2165,7 +2165,7 @@ export async function restore (
await endPromise
} catch (err: any) {
ctx.error('failed to processing', { storageFile: sf, processed, workspace: workspaceId.name })
ctx.error('failed to processing', { storageFile: sf, processed, workspace: wsIds.url })
}
}
}

View File

@ -479,7 +479,7 @@ export class WorkspaceWorker {
await sendEvent('archiving-backup-started', 0)
await this.sendTransactorMaitenance(token, workspace.uuid)
if (await this.doBackup(ctx, workspace, opt)) {
if (await this.doBackup(ctx, workspace, opt, true)) {
await sendEvent('archiving-backup-done', 100)
}
break
@ -516,7 +516,7 @@ export class WorkspaceWorker {
case 'migration-backup':
await sendEvent('migrate-backup-started', 0)
await this.sendTransactorMaitenance(token, workspace.uuid)
if (await this.doBackup(ctx, workspace, opt)) {
if (await this.doBackup(ctx, workspace, opt, false)) {
await sendEvent('migrate-backup-done', 100)
}
break
@ -551,7 +551,12 @@ export class WorkspaceWorker {
}
}
private async doBackup (ctx: MeasureContext, workspace: WorkspaceInfoWithStatus, opt: WorkspaceOptions): Promise<boolean> {
private async doBackup (
ctx: MeasureContext,
workspace: WorkspaceInfoWithStatus,
opt: WorkspaceOptions,
doFullCheck: boolean
): Promise<boolean> {
if (opt.backup === undefined) {
return false
}
@ -619,7 +624,7 @@ export class WorkspaceWorker {
50000,
['blob'],
sharedPipelineContextVars,
true, // Do a full check
doFullCheck, // Full check is driven by config; skip it for migration — it is too slow and will be performed before migration.
(_p: number) => {
if (progress !== Math.round(_p)) {
progress = Math.round(_p)