QFIX: Improve backup find tool (#8783)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Andrey Sobolev, 2025-04-30 16:39:13 +07:00 (committed via GitHub)
parent a054539a16
commit 5caa14dbf2
2 changed files with 33 additions and 9 deletions


@@ -992,9 +992,11 @@ export function devTool (
     .command('backup-find <dirName> <fileId>')
     .description('dump workspace transactions and minio resources')
     .option('-d, --domain <domain>', 'Check only domain')
-    .action(async (dirName: string, fileId: string, cmd: { domain: string | undefined }) => {
+    .option('-a, --all', 'Show all versions', false)
+    .action(async (dirName: string, fileId: string, cmd: { domain: string | undefined, all: boolean }) => {
       const storage = await createFileBackupStorage(dirName)
-      await backupFind(storage, fileId as unknown as Ref<Doc>, cmd.domain)
+      console.log(cmd.all)
+      await backupFind(storage, fileId as unknown as Ref<Doc>, cmd.all, cmd.domain)
     })
   program
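
The new `-a, --all` flag is a plain boolean option: commander's three-argument `.option()` call registers it with a default of `false`, so `cmd.all` becomes `true` only when `-a`/`--all` is passed, and the action forwards it to `backupFind` ahead of the optional domain filter. Below is a minimal sketch of that wiring, assuming the `commander` package; `findInBackup` is a hypothetical stand-in for the real `backupFind`.

```ts
// Sketch only: shows how a boolean --all flag reaches the action handler.
// `findInBackup` is a placeholder, not the real backupFind implementation.
import { program } from 'commander'

async function findInBackup (dirName: string, fileId: string, all: boolean, domain?: string): Promise<void> {
  console.log('searching', { dirName, fileId, all, domain })
}

program
  .command('backup-find <dirName> <fileId>')
  .option('-d, --domain <domain>', 'Check only domain')
  .option('-a, --all', 'Show all versions', false) // boolean flag, default false
  .action(async (dirName: string, fileId: string, cmd: { domain?: string, all: boolean }) => {
    // cmd.all is true only when -a/--all was given on the command line
    await findInBackup(dirName, fileId, cmd.all, cmd.domain)
  })

void program.parseAsync(process.argv)
```

With this wiring, an invocation such as `backup-find <dirName> <fileId> --all` asks for every snapshot that still contains the document rather than only the most recent one.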


@@ -196,7 +196,7 @@ async function loadDigest (
       break
     }
   }
-  ctx.info('load-digest', { domain, snapshots: snapshots.length, documents: result.size })
+  // ctx.info('load-digest', { domain, snapshots: snapshots.length, documents: result.size })
   return result
 }
 async function verifyDigest (
@@ -1668,7 +1668,12 @@ export async function backupDownload (storage: BackupStorage, storeIn: string):
 /**
  * @public
  */
-export async function backupFind (storage: BackupStorage, id: Ref<Doc>, domain?: string): Promise<void> {
+export async function backupFind (
+  storage: BackupStorage,
+  id: Ref<Doc>,
+  showAll: boolean,
+  domain?: string
+): Promise<void> {
   const infoFile = 'backup.json.gz'
   if (!(await storage.exists(infoFile))) {
@@ -1698,25 +1703,42 @@ export async function backupFind (storage: BackupStorage, id: Ref<Doc>, domain?:
       console.log('we found file')
       let found = false
       for (const sn of rnapshots) {
+        const ssDigest = await loadDigest(toolCtx, storage, [sn], dd)
+        if (!ssDigest.has(id)) {
+          continue
+        }
         const d = sn.domains[dd]
-        if (found) {
+        if (found && !showAll) {
           break
         }
         for (const sf of d?.storage ?? []) {
-          if (found) {
+          if (found && !showAll) {
             break
           }
           console.log('processing', sf)
           const readStream = await storage.load(sf)
           const ex = extract()
           ex.on('entry', (headers, stream, next) => {
             if (headers.name === id + '.json') {
               console.log('file found in:', sf)
+              const chunks: Buffer[] = []
+              stream.on('data', (chunk) => {
+                chunks.push(chunk)
+              })
+              stream.on('end', () => {
+                const bf = Buffer.concat(chunks as any)
+                console.log('>>>>>>>>>>>')
+                console.log(JSON.stringify(JSON.parse(bf.toString()), undefined, 2))
+                console.log('>>>>>>>>>>>')
+                next()
+              })
               found = true
+            } else {
+              stream.resume() // auto drain for non-matching entries
+              next() // continue to the next entry
             }
-            next()
-            stream.resume() // just auto drain the stream
           })
           const endPromise = new Promise((resolve) => {
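
This hunk changes two things. First, each snapshot is now checked against its own digest (the added `loadDigest(toolCtx, storage, [sn], dd)` call), and the early `break` on `found` is skipped when `--all` was requested, so older versions keep being scanned; silencing the `load-digest` info log above presumably avoids one log line per snapshot. Second, the tar entry handler now buffers the matching entry and pretty-prints its JSON instead of only reporting where it was found. With `tar-stream`, the `next()` callback has to be called exactly once per entry and every entry stream has to be consumed or drained, which is why the old unconditional `next()`/`stream.resume()` tail was replaced with per-branch handling. Below is a small self-contained sketch of the same pattern, assuming the `tar-stream` package; the file and entry names are placeholders, and unlike the real backup storage it reads a plain, non-gzipped tar.

```ts
// Sketch only: locate one entry in a tar archive and pretty-print it as JSON.
// 'backup-0.tar' and 'doc-id.json' below are placeholder names.
import { createReadStream } from 'fs'
import { extract } from 'tar-stream'

function printEntryFromTar (tarPath: string, entryName: string): Promise<boolean> {
  return new Promise((resolve, reject) => {
    let found = false
    const ex = extract()

    ex.on('entry', (headers, stream, next) => {
      if (headers.name === entryName) {
        // Buffer the matching entry, then print it once its stream ends.
        const chunks: Buffer[] = []
        stream.on('data', (chunk) => chunks.push(chunk))
        stream.on('end', () => {
          console.log(JSON.stringify(JSON.parse(Buffer.concat(chunks).toString()), undefined, 2))
          found = true
          next() // advance only after the entry has been fully read
        })
      } else {
        stream.resume() // drain entries we do not care about
        next()
      }
    })

    ex.on('finish', () => resolve(found))
    ex.on('error', reject)

    createReadStream(tarPath).pipe(ex)
  })
}

// Example: printEntryFromTar('backup-0.tar', 'doc-id.json').then((ok) => console.log('found:', ok))
```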