UBERF-9543: Restore-all tool (#8132)
Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
parent: c90259cd79, commit: 5ad36e087a
.vscode/launch.json (vendored): 20 lines changed
@@ -497,6 +497,26 @@
       "cwd": "${workspaceRoot}/dev/tool",
       "protocol": "inspector"
     },
+    {
+      "name": "Debug restore-all tool",
+      "type": "node",
+      "request": "launch",
+      "args": ["src/__start.ts", "restore-all"],
+      "env": {
+        "ACCOUNTS_URL": "http://localhost:3000",
+        "STORAGE": "minio|localhost?accessKey=minioadmin&secretKey=minioadmin",
+        // "DB_URL": "mongodb://localhost:27017",
+        "DB_URL": "postgresql://root@host.docker.internal:26258/defaultdb?sslmode=disable",
+        "SERVER_SECRET": "secret",
+        "BUCKET_NAME": "dev-backups"
+      },
+      "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
+      "showAsyncStacks": true,
+      "sourceMaps": true,
+      "outputCapture": "std",
+      "cwd": "${workspaceRoot}/dev/tool",
+      "protocol": "inspector"
+    },
     {
       "name": "Debug Github integration",
       "type": "node",
@@ -78,10 +78,11 @@ import { buildStorageFromConfig, createStorageFromConfig, storageConfigFromEnv }
 import { program, type Command } from 'commander'
 import { addControlledDocumentRank } from './qms'
 import { clearTelegramHistory } from './telegram'
-import { diffWorkspace, updateField } from './workspace'
+import { backupRestore, diffWorkspace, updateField } from './workspace'
 
 import core, {
   AccountRole,
+  DOMAIN_BLOB,
   generateId,
   getWorkspaceId,
   isActiveMode,
@@ -697,6 +698,70 @@ export function devTool (
       }
     )
 
+  program
+    .command('restore-all')
+    .description('Restore workspaces to selected region DB...')
+    .option('-t|--timeout [timeout]', 'Timeout in days', '60')
+    .option('-r|--region [region]', 'Region', '')
+    .option('-w|--workspace [workspace]', 'Force restore of selected workspace', '')
+    .option('-d|--dry [dry]', 'Dry run', false)
+    .action(async (cmd: { timeout: string, workspace: string, region: string, dry: boolean, account: string }) => {
+      const { txes, dbUrl } = prepareTools()
+
+      const bucketName = process.env.BUCKET_NAME
+      if (bucketName === '' || bucketName == null) {
+        console.error('please provide BUCKET_NAME env')
+        process.exit(1)
+      }
+
+      const token = generateToken(systemAccountEmail, getWorkspaceId(''))
+      const workspaces = (await listAccountWorkspaces(token, cmd.region))
+        .sort((a, b) => {
+          const bsize = b.backupInfo?.backupSize ?? 0
+          const asize = a.backupInfo?.backupSize ?? 0
+          return bsize - asize
+        })
+        .filter((it) => cmd.workspace === '' || cmd.workspace === it.workspace)
+
+      for (const ws of workspaces) {
+        const lastVisitDays = Math.floor((Date.now() - ws.lastVisit) / 1000 / 3600 / 24)
+
+        toolCtx.warn('--- restoring workspace', {
+          url: ws.workspaceUrl,
+          id: ws.workspace,
+          lastVisitDays,
+          backupSize: ws.backupInfo?.blobsSize ?? 0,
+          mode: ws.mode
+        })
+        if (cmd.dry) {
+          continue
+        }
+        try {
+          const st = Date.now()
+          await backupRestore(
+            toolCtx,
+            dbUrl,
+            bucketName,
+            ws,
+            (dbUrl, storageAdapter) => {
+              const factory: PipelineFactory = createBackupPipeline(toolCtx, dbUrl, txes, {
+                externalStorage: storageAdapter,
+                usePassedCtx: true
+              })
+              return factory
+            },
+            [DOMAIN_BLOB]
+          )
+          const ed = Date.now()
+          toolCtx.warn('--- restore complete', {
+            time: ed - st
+          })
+        } catch (err: any) {
+          toolCtx.error('restore of workspace failed', { workspace: ws.workspace })
+        }
+      }
+    })
+
   program
     .command('backup-all')
     .description('Backup all workspaces...')
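For reference, a minimal self-contained TypeScript sketch of the ordering the command above applies before restoring: largest backups first, optionally narrowed to a single workspace. The WorkspaceStub type and orderForRestore helper are hypothetical and only mirror the fields the command actually reads.

interface WorkspaceStub {
  workspace: string
  backupInfo?: { backupSize: number }
}

// Largest backups come first; an empty filter keeps every workspace.
function orderForRestore (workspaces: WorkspaceStub[], only: string = ''): WorkspaceStub[] {
  return workspaces
    .slice() // avoid mutating the caller's array, unlike the in-place sort above
    .sort((a, b) => (b.backupInfo?.backupSize ?? 0) - (a.backupInfo?.backupSize ?? 0))
    .filter((it) => only === '' || only === it.workspace)
}

// Example: orderForRestore(allWorkspaces, 'my-workspace') keeps only that workspace.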
@@ -16,16 +16,28 @@
 
 import contact from '@hcengineering/contact'
 import core, {
+  DOMAIN_TX,
+  getWorkspaceId,
   type BackupClient,
+  type BaseWorkspaceInfo,
   type Class,
   type Client as CoreClient,
   type Doc,
-  DOMAIN_TX,
+  type MeasureContext,
   type Ref,
   type Tx,
-  type WorkspaceId
+  type WorkspaceId,
+  type WorkspaceIdWithUrl
 } from '@hcengineering/core'
 import { getMongoClient, getWorkspaceMongoDB } from '@hcengineering/mongo'
+import { createStorageBackupStorage, restore } from '@hcengineering/server-backup'
+import {
+  createDummyStorageAdapter,
+  wrapPipeline,
+  type PipelineFactory,
+  type StorageAdapter
+} from '@hcengineering/server-core'
+import { createStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
 import { connect } from '@hcengineering/server-tool'
 import { generateModelDiff, printDiff } from './mdiff'
 
@@ -95,3 +107,57 @@ export async function updateField (
     await connection.close()
   }
 }
+
+export async function backupRestore (
+  ctx: MeasureContext,
+  dbURL: string,
+  bucketName: string,
+  workspace: BaseWorkspaceInfo,
+  pipelineFactoryFactory: (mongoUrl: string, storage: StorageAdapter) => PipelineFactory,
+  skipDomains: string[]
+): Promise<boolean> {
+  const storageEnv = process.env.STORAGE
+  if (storageEnv === undefined) {
+    console.error('please provide STORAGE env')
+    process.exit(1)
+  }
+  if (bucketName.trim() === '') {
+    console.error('please provide BUCKET_NAME env')
+    process.exit(1)
+  }
+  const backupStorageConfig = storageConfigFromEnv(storageEnv)
+
+  const storageAdapter = createStorageFromConfig(backupStorageConfig.storages[0])
+
+  const workspaceStorage = createDummyStorageAdapter()
+  const pipelineFactory = pipelineFactoryFactory(dbURL, workspaceStorage)
+
+  try {
+    const storage = await createStorageBackupStorage(
+      ctx,
+      storageAdapter,
+      getWorkspaceId(bucketName),
+      workspace.workspace
+    )
+    const wsUrl: WorkspaceIdWithUrl = {
+      name: workspace.workspace,
+      uuid: workspace.uuid,
+      workspaceName: workspace.workspaceName ?? '',
+      workspaceUrl: workspace.workspaceUrl ?? ''
+    }
+    const result: boolean = await ctx.with('restore', { workspace: workspace.workspace }, (ctx) =>
+      restore(ctx, '', getWorkspaceId(workspace.workspace), storage, {
+        date: -1,
+        skip: new Set(skipDomains),
+        recheck: false,
+        storageAdapter: workspaceStorage,
+        getConnection: async () => {
+          return wrapPipeline(ctx, await pipelineFactory(ctx, wsUrl, true, () => {}, null), wsUrl)
+        }
+      })
+    )
+    return result
+  } finally {
+    await storageAdapter.close()
+  }
+}
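The contract of backupRestore above: the caller supplies a callback that, given the DB URL and the backup storage adapter, returns a PipelineFactory; the helper then restores the latest backup snapshot (date: -1) through that pipeline while skipping the listed domains. Below is a hedged sketch of the call shape, modelled on the restore-all command; buildPipelineFactory and restoreOne are hypothetical names, and the import paths assume the dev-tool layout shown in this diff.

import { DOMAIN_BLOB, type BaseWorkspaceInfo, type MeasureContext } from '@hcengineering/core'
import { type PipelineFactory, type StorageAdapter } from '@hcengineering/server-core'
import { backupRestore } from './workspace'

// Hypothetical stand-in for the caller's pipeline construction
// (restore-all uses createBackupPipeline here).
declare function buildPipelineFactory (dbUrl: string, storage: StorageAdapter): PipelineFactory

async function restoreOne (ctx: MeasureContext, dbUrl: string, ws: BaseWorkspaceInfo): Promise<void> {
  const ok = await backupRestore(
    ctx,
    dbUrl,
    process.env.BUCKET_NAME ?? '', // backupRestore exits if the bucket name is empty
    ws,
    (url, storage) => buildPipelineFactory(url, storage),
    [DOMAIN_BLOB] // skip blob documents, as restore-all does
  )
  if (!ok) {
    ctx.error('restore returned false', { workspace: ws.workspace })
  }
}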
@@ -451,7 +451,7 @@ export async function doRestoreWorkspace (
     restore(ctx, '', getWorkspaceId(ws.workspace), storage, {
       date: -1,
       skip: new Set(skipDomains),
-      recheck: true,
+      recheck: false, // Do not need to recheck
       storageAdapter: workspaceStorageAdapter,
       cleanIndexState,
       getConnection: async () => {
@@ -63,6 +63,10 @@ export function serveWorkspaceAccount (
     process.exit(1)
   }
 
+  if (process.env.MIGRATION_CLEANUP !== 'true') {
+    console.log('Migration cleanup is not set, so moving to regions will not clean the old DB.')
+  }
+
   const backup =
     wsOperation === 'all+backup'
       ? {
@@ -15,6 +15,7 @@
 import {
   type BaseWorkspaceInfo,
   type BrandingMap,
+  DOMAIN_BLOB,
   type Data,
   type MeasureContext,
   type Tx,
@@ -70,8 +71,6 @@ import {
 import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
 import { createWorkspace, upgradeWorkspace } from './ws-operations'
 
-const dbCleanTreshold = 256 // Cleanup workspaces if less 256mb
-
 export interface WorkspaceOptions {
   errorHandler: (workspace: BaseWorkspaceInfo, error: any) => Promise<void>
   force: boolean
@@ -498,8 +497,7 @@ export class WorkspaceWorker {
       await sendEvent('migrate-clean-started', 0)
       await this.sendTransactorMaitenance(token, { name: workspace.workspace })
 
-      const sz = workspace.backupInfo?.backupSize ?? 0
-      if (sz <= dbCleanTreshold) {
+      if (process.env.MIGRATION_CLEANUP === 'true') {
         try {
           await this.doCleanup(ctx, workspace, false)
         } catch (err: any) {
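Together with the serveWorkspaceAccount change above, cleanup of the old-region database after a move is now opt-in via the MIGRATION_CLEANUP environment variable rather than being gated on backup size. A compressed, illustrative sketch of the resulting behaviour; only the MIGRATION_CLEANUP flag and the log text come from this diff, while the helper name and error handling are placeholders.

// Illustrative only: clean up the source DB after a region move only when
// MIGRATION_CLEANUP is explicitly enabled; otherwise just log a notice.
async function maybeCleanupAfterMove (doCleanup: () => Promise<void>): Promise<void> {
  if (process.env.MIGRATION_CLEANUP === 'true') {
    try {
      await doCleanup()
    } catch (err: any) {
      console.error('cleanup failed', err)
    }
  } else {
    console.log('Migration cleanup is not set, so moving to regions will not clean the old DB.')
  }
}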
@@ -651,7 +649,7 @@ export class WorkspaceWorker {
           opt.backup.bucketName,
           pipelineFactory,
           workspaceStorageAdapter,
-          ['blob'],
+          [DOMAIN_BLOB],
           true,
           (_p: number) => {
             if (progress !== Math.round(_p)) {