From 4dcfc5e5334372cc66d0c352ec22f039d01bb6d3 Mon Sep 17 00:00:00 2001
From: Andrey Sobolev
Date: Thu, 27 Jun 2024 15:07:15 +0700
Subject: [PATCH] UBERF-7411: Allow to backup blobs with wrong size (#5926)

Signed-off-by: Andrey Sobolev
---
 packages/text-editor/src/provider/storage.ts | 16 ++++-
 packages/ui/src/components/Loading.svelte    |  2 +-
 plugins/workbench-resources/src/connect.ts   |  4 +-
 server/account/src/operations.ts             | 16 +++--
 server/backup/src/backup.ts                  | 71 +++++++++++---------
 server/server-storage/src/blobStorage.ts     |  2 +
 6 files changed, 69 insertions(+), 42 deletions(-)

diff --git a/packages/text-editor/src/provider/storage.ts b/packages/text-editor/src/provider/storage.ts
index bb8f8e9f6a..808cacd96d 100644
--- a/packages/text-editor/src/provider/storage.ts
+++ b/packages/text-editor/src/provider/storage.ts
@@ -12,8 +12,14 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 //
-import { type Blob, type CollaborativeDoc, type Ref, collaborativeDocParse } from '@hcengineering/core'
-import { getBlobHref } from '@hcengineering/presentation'
+import core, {
+  collaborativeDocParse,
+  type Blob,
+  type BlobLookup,
+  type CollaborativeDoc,
+  type Ref
+} from '@hcengineering/core'
+import { getBlobHref, getClient } from '@hcengineering/presentation'
 import { ObservableV2 as Observable } from 'lib0/observable'
 import { applyUpdate, type Doc as YDoc } from 'yjs'
 
@@ -33,7 +39,11 @@ async function fetchContent (blob: Ref, doc: YDoc): Promise {
 
 async function fetchBlobContent (_id: Ref): Promise {
   try {
-    const href = await getBlobHref(undefined, _id)
+    const blob = (await getClient().findOne(core.class.Blob, { _id })) as BlobLookup
+    if (blob.size === 0) {
+      return undefined
+    }
+    const href = await getBlobHref(blob, _id)
     const res = await fetch(href)
 
     if (res.ok) {
diff --git a/packages/ui/src/components/Loading.svelte b/packages/ui/src/components/Loading.svelte
index 734e4b4976..9f505ce2b7 100644
--- a/packages/ui/src/components/Loading.svelte
+++ b/packages/ui/src/components/Loading.svelte
@@ -66,7 +66,7 @@
     opacity: 0;
     animation-name: makeVisible;
     animation-duration: 0.25s;
-    animation-delay: 0.1s;
+    animation-delay: 0.25s;
     animation-fill-mode: forwards;
 
     &.labeled {
diff --git a/plugins/workbench-resources/src/connect.ts b/plugins/workbench-resources/src/connect.ts
index 192de11431..3ad7fcf374 100644
--- a/plugins/workbench-resources/src/connect.ts
+++ b/plugins/workbench-resources/src/connect.ts
@@ -90,12 +90,12 @@ export async function connect (title: string): Promise {
   setMetadata(presentation.metadata.Token, token)
 
   const fetchWorkspace = await getResource(login.function.FetchWorkspace)
-  let loginInfo = await ctx.with('select-workspace', {}, async () => (await fetchWorkspace(ws))[1])
+  let loginInfo = await ctx.with('fetch-workspace', {}, async () => (await fetchWorkspace(ws))[1])
   if (loginInfo?.creating === true) {
     while (true) {
       if (ws !== getCurrentLocation().path[1]) return
       workspaceCreating.set(loginInfo?.createProgress ?? 0)
-      loginInfo = await ctx.with('select-workspace', {}, async () => (await fetchWorkspace(ws))[1])
+      loginInfo = await ctx.with('fetch-workspace', {}, async () => (await fetchWorkspace(ws))[1])
       workspaceCreating.set(loginInfo?.createProgress)
       if (loginInfo?.creating === false) {
         workspaceCreating.set(-1)
diff --git a/server/account/src/operations.ts b/server/account/src/operations.ts
index 216bb11330..c57938bf42 100644
--- a/server/account/src/operations.ts
+++ b/server/account/src/operations.ts
@@ -50,7 +50,7 @@ import { getModelVersion } from '@hcengineering/model-all'
 import platform, { getMetadata, PlatformError, Severity, Status, translate } from '@hcengineering/platform'
 import { cloneWorkspace } from '@hcengineering/server-backup'
 import { decodeToken, generateToken } from '@hcengineering/server-token'
-import toolPlugin, { connect, initModel, upgradeModel, getStorageAdapter } from '@hcengineering/server-tool'
+import toolPlugin, { connect, getStorageAdapter, initModel, upgradeModel } from '@hcengineering/server-tool'
 import { pbkdf2Sync, randomBytes } from 'crypto'
 import { Binary, Db, Filter, ObjectId, type MongoClient } from 'mongodb'
 import fetch from 'node-fetch'
@@ -1297,7 +1297,7 @@ export async function getWorkspaceInfo (
     workspace: workspace.name
   }
   if (email !== systemAccountEmail && !guest) {
-    account = await getAccount(db, email)
+    account = await ctx.with('get-account', {}, async () => await getAccount(db, email))
     if (account === null) {
       ctx.error('no account', { email, productId, token })
       throw new PlatformError(new Status(Severity.ERROR, platform.status.Forbidden, {}))
     }
@@ -1318,15 +1318,19 @@
     query._id = { $in: account.workspaces }
   }
 
-  const [ws] = (
-    await db.collection(WORKSPACE_COLLECTION).find(withProductId(productId, query)).toArray()
-  ).filter((it) => it.disabled !== true || account?.admin === true || it.creating === true)
+  const [ws] = await ctx.with('get-account', {}, async () =>
+    (await db.collection(WORKSPACE_COLLECTION).find(withProductId(productId, query)).toArray()).filter(
+      (it) => it.disabled !== true || account?.admin === true || it.creating === true
+    )
+  )
   if (ws == null) {
     ctx.error('no workspace', { workspace: workspace.name, email })
     throw new PlatformError(new Status(Severity.ERROR, platform.status.Forbidden, {}))
   }
   if (_updateLastVisit && isAccount(account)) {
-    await updateLastVisit(db, ws, account)
+    void ctx.with('update-last-visit', {}, async () => {
+      await updateLastVisit(db, ws, account as Account)
+    })
   }
   return mapToClientWorkspace(ws)
 }
diff --git a/server/backup/src/backup.ts b/server/backup/src/backup.ts
index cc6717ec11..92ea016db6 100644
--- a/server/backup/src/backup.ts
+++ b/server/backup/src/backup.ts
@@ -35,6 +35,7 @@
 } from '@hcengineering/core'
 import type { StorageAdapter } from '@hcengineering/server-core'
 import { BlobClient, connect } from '@hcengineering/server-tool'
+import { mkdtemp, writeFile } from 'node:fs/promises'
 import { PassThrough } from 'node:stream'
 import { createGzip } from 'node:zlib'
 import { join } from 'path'
@@ -554,6 +555,8 @@ export async function backup (
   const blobClient = new BlobClient(transactorUrl, workspaceId)
   ctx.info('starting backup', { workspace: workspaceId.name })
 
+  let tmpDir: string | undefined
+
   try {
     const domains = [
       ...connection
@@ -867,45 +870,52 @@
           }
 
           let blobFiled = false
-          if (blob.size !== 0 && !(await blobClient.checkFile(ctx, blob._id))) {
-            ctx.error('failed to download blob', { blob: blob._id, provider: blob.provider })
-            processChanges(d, true)
-            continue
-          }
-
           addedDocuments += descrJson.length
           addedDocuments += blob.size
-          _pack.entry({ name: d._id + '.json' }, descrJson, function (err) {
-            if (err != null) throw err
-          })
           printDownloaded(blob._id, descrJson.length)
           try {
-            const entry = _pack?.entry({ name: d._id, size: blob.size }, (err) => {
+            const buffers: Buffer[] = []
+            await blobClient.writeTo(ctx, blob._id, blob.size, {
+              write (buffer, cb) {
+                buffers.push(buffer)
+                cb()
+              },
+              end: (cb: () => void) => {
+                cb()
+              }
+            })
+
+            const finalBuffer = Buffer.concat(buffers)
+            if (finalBuffer.length !== blob.size) {
+              tmpDir = tmpDir ?? (await mkdtemp('backup', {}))
+              const tmpFile = join(tmpDir, blob._id)
+              await writeFile(tmpFile, finalBuffer)
+              await writeFile(tmpFile + '.json', JSON.stringify(blob, undefined, 2))
+              ctx.error('download blob size mismatch', {
+                _id: blob._id,
+                contentType: blob.contentType,
+                size: blob.size,
+                provider: blob.provider,
+                tempDir: tmpDir
+              })
+            }
+            _pack.entry({ name: d._id + '.json' }, descrJson, (err) => {
+              if (err != null) throw err
+            })
+            _pack?.entry({ name: d._id, size: finalBuffer.length }, finalBuffer, (err) => {
               if (err != null) {
                 ctx.error('error packing file', { err })
               }
             })
-            if (blob.size === 0) {
-              entry.end()
-            } else {
-              // if (blob.size > 1024 * 1024) {
-              ctx.info('download blob', {
-                _id: blob._id,
-                contentType: blob.contentType,
-                size: blob.size,
-                provider: blob.provider
-              })
-              // }
-              await blobClient.writeTo(ctx, blob._id, blob.size, {
-                write (buffer, cb) {
-                  entry.write(buffer, cb)
-                },
-                end: (cb: () => void) => {
-                  entry.end(cb)
-                }
-              })
-            }
+            // if (blob.size > 1024 * 1024) {
+            ctx.info('download blob', {
+              _id: blob._id,
+              contentType: blob.contentType,
+              size: blob.size,
+              provider: blob.provider,
+              pending: docs.length
+            })
 
             printDownloaded(blob._id, blob.size)
           } catch (err: any) {
@@ -916,6 +926,7 @@
            }
            blobFiled = true
          }
+         processChanges(d, blobFiled)
        } else {
          const data = JSON.stringify(d)
diff --git a/server/server-storage/src/blobStorage.ts b/server/server-storage/src/blobStorage.ts
index b8c7389fb2..34911dab32 100644
--- a/server/server-storage/src/blobStorage.ts
+++ b/server/server-storage/src/blobStorage.ts
@@ -88,6 +88,8 @@ class StorageBlobAdapter implements DbAdapter {
         blob.contentType = blobStat.contentType
         blob.version = blobStat.version
         blob.size = blobStat.size
+        delete (blob as any).downloadUrl
+        delete (blob as any).downloadUrlExpire
         toUpload.push(blob)
       }