From c3a41ea1bb99deaae1a948205bec90e3277b9da1 Mon Sep 17 00:00:00 2001
From: Andrey Sobolev <haiodo@users.noreply.github.com>
Date: Mon, 16 Sep 2024 21:18:28 +0700
Subject: [PATCH 1/6] Print downloaded fix for backup (#6588)

---
 server/backup/src/backup.ts | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/server/backup/src/backup.ts b/server/backup/src/backup.ts
index cc74fa2afc..d92f368002 100644
--- a/server/backup/src/backup.ts
+++ b/server/backup/src/backup.ts
@@ -609,7 +609,10 @@ export async function backup (
     let downloadedMb = 0
     let downloaded = 0
 
-    const printDownloaded = (msg: string, size: number): void => {
+    const printDownloaded = (msg: string, size?: number | null): void => {
+      if (size == null || Number.isNaN(size)) {
+        return
+      }
       downloaded += size
       const newDownloadedMb = Math.round(downloaded / (1024 * 1024))
       const newId = Math.round(newDownloadedMb / 10)

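Illustrative sketch (TypeScript, not part of the patch series): the guard added above makes printDownloaded ignore a missing or NaN size instead of folding it into the running total. The counters and the logging threshold are simplified here for illustration; only the guard itself mirrors the diff.

    let downloaded = 0
    let downloadedMb = 0

    const printDownloaded = (msg: string, size?: number | null): void => {
      if (size == null || Number.isNaN(size)) {
        return // a bad size is skipped rather than turning the total into NaN
      }
      downloaded += size
      const newDownloadedMb = Math.round(downloaded / (1024 * 1024))
      if (newDownloadedMb !== downloadedMb) {
        downloadedMb = newDownloadedMb
        console.log(msg, downloadedMb, 'MB')
      }
    }

    // Calls with undefined or NaN are now no-ops:
    printDownloaded('blob', undefined)
    printDownloaded('blob', Number.NaN)
    printDownloaded('blob', 1024 * 1024) // logs: blob 1 MB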
From b06f433babd8c5dc89aa4e1f63599d6b1b0ff2a3 Mon Sep 17 00:00:00 2001
From: Andrey Sobolev <haiodo@gmail.com>
Date: Tue, 17 Sep 2024 02:25:31 +0700
Subject: [PATCH 2/6] UBERF-8122: Fix backup service

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
---
 .vscode/launch.json                      |   7 +-
 dev/tool/package.json                    |   3 +-
 dev/tool/src/clean.ts                    |  29 +++---
 dev/tool/src/storage.ts                  | 125 ++++++++++++-----------
 packages/core/src/server.ts              |   2 +-
 packages/storage/src/index.ts            |  26 ++---
 pods/server/Dockerfile                   |   2 -
 server/backup/src/backup.ts              |  69 ++++++-------
 server/core/src/__tests__/memAdapters.ts |   5 +-
 server/core/src/adapter.ts               |   2 +-
 server/core/src/mem.ts                   |   2 +-
 server/core/src/server/aggregator.ts     |  62 +++++------
 server/core/src/storage.ts               |  14 +--
 server/minio/src/index.ts                |  14 +--
 server/mongo/src/rawAdapter.ts           |  12 ++-
 server/mongo/src/storage.ts              |  82 ++++++++-------
 server/mongo/src/utils.ts                |  21 +---
 server/postgres/src/storage.ts           |  79 +++++++-------
 server/s3/src/index.ts                   |  11 +-
 server/server-storage/src/blobStorage.ts |   4 +-
 20 files changed, 287 insertions(+), 284 deletions(-)

diff --git a/.vscode/launch.json b/.vscode/launch.json
index 2e571562ed..ac8ccac484 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -221,7 +221,7 @@
       "name": "Debug backup tool",
       "type": "node",
       "request": "launch",
-      "args": ["src/index.ts", "backup", "../../../dump/alex-staff-agency2", "alex-staff-agency"],
+      "args": ["src/index.ts", "backup", "../../../dump/platform2", "platform"],
       "env": {
         "MINIO_ACCESS_KEY": "minioadmin",
         "MINIO_SECRET_KEY": "minioadmin",
@@ -234,7 +234,10 @@
       "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
       "sourceMaps": true,
       "cwd": "${workspaceRoot}/dev/tool",
-      "protocol": "inspector"
+      "protocol": "inspector",
+      "outputCapture": "std",
+      "runtimeVersion": "20",
+      "showAsyncStacks": true,
     },
     {
       "name": "Debug tool upgrade",
diff --git a/dev/tool/package.json b/dev/tool/package.json
index 476c331442..bd62efdc73 100644
--- a/dev/tool/package.json
+++ b/dev/tool/package.json
@@ -14,12 +14,13 @@
     "_phase:bundle": "rushx bundle",
     "_phase:docker-build": "rushx docker:build",
     "_phase:docker-staging": "rushx docker:staging",
-    "bundle": "mkdir -p bundle && esbuild src/__start.ts --bundle --minify --platform=node --define:process.env.MODEL_VERSION=$(node ../../common/scripts/show_version.js) --define:process.env.GIT_REVISION=$(../../common/scripts/git_version.sh) > bundle/bundle.js",
+    "bundle": "mkdir -p bundle && esbuild src/__start.ts --bundle --keep-names --sourcemap=external --platform=node --define:process.env.MODEL_VERSION=$(node ../../common/scripts/show_version.js) --define:process.env.GIT_REVISION=$(../../common/scripts/git_version.sh) --log-level=error --outfile=bundle/bundle.js",
     "docker:build": "../../common/scripts/docker_build.sh hardcoreeng/tool",
     "docker:tbuild": "docker build -t hardcoreeng/tool . --platform=linux/amd64 && ../../common/scripts/docker_tag_push.sh hardcoreeng/tool",
     "docker:staging": "../../common/scripts/docker_tag.sh hardcoreeng/tool staging",
     "docker:push": "../../common/scripts/docker_tag.sh hardcoreeng/tool",
     "run-local": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost MONGO_URL=mongodb://localhost:27017 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --max-old-space-size=18000 ./bundle/bundle.js",
+    "run-local-brk": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost MONGO_URL=mongodb://localhost:27017 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --inspect-brk --enable-source-maps --max-old-space-size=18000 ./bundle/bundle.js",
     "run": "rush bundle --to @hcengineering/tool >/dev/null && cross-env node --max-old-space-size=8000 ./bundle/bundle.js",
     "upgrade": "rushx run-local upgrade",
     "format": "format src",
diff --git a/dev/tool/src/clean.ts b/dev/tool/src/clean.ts
index 612dcb0a92..cd199eec1e 100644
--- a/dev/tool/src/clean.ts
+++ b/dev/tool/src/clean.ts
@@ -104,12 +104,15 @@ export async function cleanWorkspace (
     const minioList = await storageAdapter.listStream(ctx, workspaceId)
     const toClean: string[] = []
     while (true) {
-      const mv = await minioList.next()
-      if (mv === undefined) {
+      const mvFiles = await minioList.next()
+      if (mvFiles.length === 0) {
         break
       }
-      if (!files.has(mv._id)) {
-        toClean.push(mv._id)
+
+      for (const mv of mvFiles) {
+        if (!files.has(mv._id)) {
+          toClean.push(mv._id)
+        }
       }
     }
     await storageAdapter.remove(ctx, workspaceId, toClean)
@@ -192,16 +195,18 @@ export async function fixMinioBW (
   const list = await storageService.listStream(ctx, workspaceId)
   let removed = 0
   while (true) {
-    const obj = await list.next()
-    if (obj === undefined) {
+    const objs = await list.next()
+    if (objs.length === 0) {
       break
     }
-    if (obj.modifiedOn < from) continue
-    if ((obj._id as string).includes('%preview%')) {
-      await storageService.remove(ctx, workspaceId, [obj._id])
-      removed++
-      if (removed % 100 === 0) {
-        console.log('removed: ', removed)
+    for (const obj of objs) {
+      if (obj.modifiedOn < from) continue
+      if ((obj._id as string).includes('%preview%')) {
+        await storageService.remove(ctx, workspaceId, [obj._id])
+        removed++
+        if (removed % 100 === 0) {
+          console.log('removed: ', removed)
+        }
       }
     }
   }
diff --git a/dev/tool/src/storage.ts b/dev/tool/src/storage.ts
index 1dc0f55d96..e5a305a910 100644
--- a/dev/tool/src/storage.ts
+++ b/dev/tool/src/storage.ts
@@ -40,20 +40,22 @@ export async function syncFiles (
       const iterator = await adapter.listStream(ctx, workspaceId)
       try {
         while (true) {
-          const data = await iterator.next()
-          if (data === undefined) break
+          const dataBulk = await iterator.next()
+          if (dataBulk.length === 0) break
 
-          const blob = await exAdapter.stat(ctx, workspaceId, data._id)
-          if (blob !== undefined) continue
+          for (const data of dataBulk) {
+            const blob = await exAdapter.stat(ctx, workspaceId, data._id)
+            if (blob !== undefined) continue
 
-          await exAdapter.syncBlobFromStorage(ctx, workspaceId, data._id, name)
+            await exAdapter.syncBlobFromStorage(ctx, workspaceId, data._id, name)
 
-          count += 1
-          if (count % 100 === 0) {
-            const duration = Date.now() - time
-            time = Date.now()
+            count += 1
+            if (count % 100 === 0) {
+              const duration = Date.now() - time
+              time = Date.now()
 
-            console.log('...processed', count, Math.round(duration / 1000) + 's')
+              console.log('...processed', count, Math.round(duration / 1000) + 's')
+            }
           }
         }
         console.log('processed', count)
@@ -112,64 +114,67 @@ async function processAdapter (
   const iterator = await source.listStream(ctx, workspaceId)
   try {
     while (true) {
-      const data = await iterator.next()
-      if (data === undefined) break
+      const dataBulk = await iterator.next()
+      if (dataBulk.length === 0) break
 
-      const blob = (await exAdapter.stat(ctx, workspaceId, data._id)) ?? (await source.stat(ctx, workspaceId, data._id))
+      for (const data of dataBulk) {
+        const blob =
+          (await exAdapter.stat(ctx, workspaceId, data._id)) ?? (await source.stat(ctx, workspaceId, data._id))
 
-      if (blob === undefined) {
-        console.error('blob not found', data._id)
-        continue
-      }
-
-      if (blob.provider !== exAdapter.defaultAdapter) {
-        if (blob.size <= params.blobSizeLimitMb * 1024 * 1024) {
-          await rateLimiter.exec(async () => {
-            try {
-              await retryOnFailure(
-                ctx,
-                5,
-                async () => {
-                  await processFile(ctx, source, params.move ? exAdapter : target, workspaceId, blob)
-                },
-                50
-              )
-              movedCnt += 1
-              movedBytes += blob.size
-              batchBytes += blob.size
-            } catch (err) {
-              console.error('failed to process blob', data._id, err)
-            }
-          })
-        } else {
-          skippedCnt += 1
-          console.log('skipping large blob', data._id, Math.round(blob.size / 1024 / 1024))
+        if (blob === undefined) {
+          console.error('blob not found', data._id)
+          continue
         }
-      }
 
-      processedCnt += 1
-      processedBytes += blob.size
+        if (blob.provider !== exAdapter.defaultAdapter) {
+          if (blob.size <= params.blobSizeLimitMb * 1024 * 1024) {
+            await rateLimiter.exec(async () => {
+              try {
+                await retryOnFailure(
+                  ctx,
+                  5,
+                  async () => {
+                    await processFile(ctx, source, params.move ? exAdapter : target, workspaceId, blob)
+                  },
+                  50
+                )
+                movedCnt += 1
+                movedBytes += blob.size
+                batchBytes += blob.size
+              } catch (err) {
+                console.error('failed to process blob', data._id, err)
+              }
+            })
+          } else {
+            skippedCnt += 1
+            console.log('skipping large blob', data._id, Math.round(blob.size / 1024 / 1024))
+          }
+        }
 
-      if (processedCnt % 100 === 0) {
-        await rateLimiter.waitProcessing()
+        processedCnt += 1
+        processedBytes += blob.size
 
-        const duration = Date.now() - time
+        if (processedCnt % 100 === 0) {
+          await rateLimiter.waitProcessing()
 
-        console.log(
-          '...processed',
-          processedCnt,
-          Math.round(processedBytes / 1024 / 1024) + 'MB',
-          'moved',
-          movedCnt,
-          Math.round(movedBytes / 1024 / 1024) + 'MB',
-          '+' + Math.round(batchBytes / 1024 / 1024) + 'MB',
-          'skipped',
-          skippedCnt,
-          Math.round(duration / 1000) + 's'
-        )
+          const duration = Date.now() - time
 
-        batchBytes = 0
-        time = Date.now()
+          console.log(
+            '...processed',
+            processedCnt,
+            Math.round(processedBytes / 1024 / 1024) + 'MB',
+            'moved',
+            movedCnt,
+            Math.round(movedBytes / 1024 / 1024) + 'MB',
+            '+' + Math.round(batchBytes / 1024 / 1024) + 'MB',
+            'skipped',
+            skippedCnt,
+            Math.round(duration / 1000) + 's'
+          )
+
+          batchBytes = 0
+          time = Date.now()
+        }
       }
     }
 
diff --git a/packages/core/src/server.ts b/packages/core/src/server.ts
index b6c86d422f..8ae4596a6e 100644
--- a/packages/core/src/server.ts
+++ b/packages/core/src/server.ts
@@ -31,7 +31,7 @@ export interface DocInfo {
  * @public
  */
 export interface StorageIterator {
-  next: (ctx: MeasureContext) => Promise<DocInfo | undefined>
+  next: (ctx: MeasureContext) => Promise<DocInfo[]>
   close: (ctx: MeasureContext) => Promise<void>
 }
 
diff --git a/packages/storage/src/index.ts b/packages/storage/src/index.ts
index 95a0e97cc0..5a1a850d42 100644
--- a/packages/storage/src/index.ts
+++ b/packages/storage/src/index.ts
@@ -24,7 +24,7 @@ export interface UploadedObjectInfo {
 }
 
 export interface BlobStorageIterator {
-  next: () => Promise<ListBlobResult | undefined>
+  next: () => Promise<ListBlobResult[]>
   close: () => Promise<void>
 }
 
@@ -99,7 +99,7 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
 
   find (ctx: MeasureContext, workspaceId: WorkspaceId): StorageIterator {
     return {
-      next: async (ctx) => undefined,
+      next: async (ctx) => [],
       close: async (ctx) => {}
     }
   }
@@ -120,8 +120,8 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
 
   async listStream (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<BlobStorageIterator> {
     return {
-      next: async (): Promise<ListBlobResult | undefined> => {
-        return undefined
+      next: async (): Promise<ListBlobResult[]> => {
+        return []
       },
       close: async () => {}
     }
@@ -179,14 +179,16 @@ export async function removeAllObjects (
   const iterator = await storage.listStream(ctx, workspaceId)
   let bulk: string[] = []
   while (true) {
-    const obj = await iterator.next()
-    if (obj === undefined) {
+    const objs = await iterator.next()
+    if (objs.length === 0) {
       break
     }
-    bulk.push(obj.storageId)
-    if (bulk.length > 50) {
-      await storage.remove(ctx, workspaceId, bulk)
-      bulk = []
+    for (const obj of objs) {
+      bulk.push(obj.storageId)
+      if (bulk.length > 50) {
+        await storage.remove(ctx, workspaceId, bulk)
+        bulk = []
+      }
     }
   }
   if (bulk.length > 0) {
@@ -206,10 +208,10 @@ export async function objectsToArray (
   const bulk: ListBlobResult[] = []
   while (true) {
     const obj = await iterator.next()
-    if (obj === undefined) {
+    if (obj.length === 0) {
       break
     }
-    bulk.push(obj)
+    bulk.push(...obj)
   }
   await iterator.close()
   return bulk
diff --git a/pods/server/Dockerfile b/pods/server/Dockerfile
index 555b5c694d..938ff0edc2 100644
--- a/pods/server/Dockerfile
+++ b/pods/server/Dockerfile
@@ -2,7 +2,6 @@ FROM node:20
 
 WORKDIR /usr/src/app
 RUN npm install --ignore-scripts=false --verbose bufferutil utf-8-validate @mongodb-js/zstd snappy msgpackr msgpackr-extract --unsafe-perm
-RUN npm install --ignore-scripts=false --verbose uNetworking/uWebSockets.js#v20.47.0
 
 RUN apt-get update
 RUN apt-get install libjemalloc2
@@ -10,7 +9,6 @@ RUN apt-get install libjemalloc2
 ENV LD_PRELOAD=libjemalloc.so.2
 ENV MALLOC_CONF=dirty_decay_ms:1000,narenas:2,background_thread:true
 
-RUN mv node_modules/uWebSockets.js/*.node .
 COPY bundle/bundle.js ./
 COPY bundle/bundle.js.map ./
 
diff --git a/server/backup/src/backup.ts b/server/backup/src/backup.ts
index d92f368002..f464372c88 100644
--- a/server/backup/src/backup.ts
+++ b/server/backup/src/backup.ts
@@ -41,7 +41,6 @@ import { BlobClient, createClient } from '@hcengineering/server-client'
 import { fullTextPushStagePrefix, type StorageAdapter } from '@hcengineering/server-core'
 import { generateToken } from '@hcengineering/server-token'
 import { connect } from '@hcengineering/server-tool'
-import { mkdtemp, writeFile } from 'node:fs/promises'
 import { PassThrough } from 'node:stream'
 import { createGzip } from 'node:zlib'
 import { join } from 'path'
@@ -488,6 +487,16 @@ async function cleanDomain (ctx: MeasureContext, connection: CoreClient & Backup
   }
 }
 
+function doTrimHash (s: string | undefined): string {
+  if (s == null) {
+    return ''
+  }
+  if (s.startsWith('"') && s.endsWith('"')) {
+    return s.slice(1, s.length - 1)
+  }
+  return s
+}
+
 /**
  * @public
  */
@@ -526,11 +535,15 @@ export async function backup (
 
   let canceled = false
   let timer: any
+  let ops = 0
 
   if (options.timeout > 0) {
-    timer = setTimeout(() => {
-      ctx.error('Timeout during backup', { workspace: workspaceId.name, timeout: options.timeout / 1000 })
-      canceled = true
+    timer = setInterval(() => {
+      if (ops === 0) {
+        ctx.error('Timeout during backup', { workspace: workspaceId.name, timeout: options.timeout / 1000 })
+        ops = 0
+        canceled = true
+      }
     }, options.timeout)
   }
 
@@ -545,8 +558,6 @@ export async function backup (
   const blobClient = new BlobClient(transactorUrl, token, workspaceId, { storageAdapter: options.storageAdapter })
   ctx.info('starting backup', { workspace: workspaceId.name })
 
-  let tmpDir: string | undefined
-
   try {
     const domains = [
       ...connection
@@ -613,6 +624,7 @@ export async function backup (
       if (size == null || Number.isNaN(size)) {
         return
       }
+      ops++
       downloaded += size
       const newDownloadedMb = Math.round(downloaded / (1024 * 1024))
       const newId = Math.round(newDownloadedMb / 10)
@@ -641,6 +653,7 @@ export async function backup (
         try {
           const currentChunk = await ctx.with('loadChunk', {}, () => connection.loadChunk(domain, idx, options.recheck))
           idx = currentChunk.idx
+          ops++
 
           let needRetrieve: Ref<Doc>[] = []
           let currentNeedRetrieveSize = 0
@@ -656,17 +669,18 @@ export async function backup (
               })
               st = Date.now()
             }
-            const kHash = digest.get(id as Ref<Doc>)
+            const _hash = doTrimHash(hash)
+            const kHash = doTrimHash(digest.get(id as Ref<Doc>))
             if (kHash !== undefined) {
               digest.delete(id as Ref<Doc>)
-              if (kHash !== hash) {
-                changes.updated.set(id as Ref<Doc>, hash)
+              if (kHash !== _hash) {
+                changes.updated.set(id as Ref<Doc>, _hash)
                 needRetrieve.push(id as Ref<Doc>)
                 currentNeedRetrieveSize += size
                 changed++
               }
             } else {
-              changes.added.set(id as Ref<Doc>, hash)
+              changes.added.set(id as Ref<Doc>, _hash)
               needRetrieve.push(id as Ref<Doc>)
               changed++
               currentNeedRetrieveSize += size
@@ -728,19 +742,13 @@ export async function backup (
       }
 
       // Cumulative digest
-      const digest = await ctx.with(
-        'load-digest',
-        {},
-        async (ctx) => await loadDigest(ctx, storage, backupInfo.snapshots, domain)
-      )
+      const digest = await ctx.with('load-digest', {}, (ctx) => loadDigest(ctx, storage, backupInfo.snapshots, domain))
 
       let _pack: Pack | undefined
       let addedDocuments = 0
 
-      let { changed, needRetrieveChunks } = await ctx.with(
-        'load-chunks',
-        { domain },
-        async (ctx) => await loadChangesFromServer(ctx, domain, digest, changes)
+      let { changed, needRetrieveChunks } = await ctx.with('load-chunks', { domain }, (ctx) =>
+        loadChangesFromServer(ctx, domain, digest, changes)
       )
 
       if (needRetrieveChunks.length > 0) {
@@ -761,6 +769,7 @@ export async function backup (
         let docs: Doc[] = []
         try {
           docs = await ctx.with('load-docs', {}, async (ctx) => await connection.loadDocs(domain, needRetrieve))
+          ops++
         } catch (err: any) {
           ctx.error('error loading docs', { domain, err, workspace: workspaceId.name })
           // Put back.
@@ -876,16 +885,12 @@ export async function backup (
 
               const finalBuffer = Buffer.concat(buffers)
               if (finalBuffer.length !== blob.size) {
-                tmpDir = tmpDir ?? (await mkdtemp('backup', {}))
-                const tmpFile = join(tmpDir, blob._id)
-                await writeFile(tmpFile, finalBuffer)
-                await writeFile(tmpFile + '.json', JSON.stringify(blob, undefined, 2))
                 ctx.error('download blob size mismatch', {
                   _id: blob._id,
                   contentType: blob.contentType,
                   size: blob.size,
-                  provider: blob.provider,
-                  tempDir: tmpDir
+                  bufferSize: finalBuffer.length,
+                  provider: blob.provider
                 })
               }
               _pack.entry({ name: d._id + '.json' }, descrJson, (err) => {
@@ -975,7 +980,7 @@ export async function backup (
     }
     ctx.end()
     if (options.timeout !== -1) {
-      clearTimeout(timer)
+      clearInterval(timer)
     }
   }
 }
@@ -1200,22 +1205,12 @@ export async function restore (
       workspace: workspaceId.name
     })
 
-    const doTrim = (s: string | undefined): string | undefined => {
-      if (s == null) {
-        return s
-      }
-      if (s.startsWith('"') && s.endsWith('"')) {
-        return s.slice(1, s.length - 1)
-      }
-      return s
-    }
-
     // Let's find difference
     const docsToAdd = new Map(
       Array.from(changeset.entries()).filter(
         ([it]) =>
           !serverChangeset.has(it) ||
-          (serverChangeset.has(it) && doTrim(serverChangeset.get(it)) !== doTrim(changeset.get(it)))
+          (serverChangeset.has(it) && doTrimHash(serverChangeset.get(it)) !== doTrimHash(changeset.get(it)))
       )
     )
     const docsToRemove = Array.from(serverChangeset.keys()).filter((it) => !changeset.has(it))
diff --git a/server/core/src/__tests__/memAdapters.ts b/server/core/src/__tests__/memAdapters.ts
index a3109c6a59..02060086e2 100644
--- a/server/core/src/__tests__/memAdapters.ts
+++ b/server/core/src/__tests__/memAdapters.ts
@@ -56,7 +56,7 @@ export class MemStorageAdapter implements StorageAdapter {
     const files = Array.from(this.files.values()).filter((it) => it.workspace === workspaceId.name)
     return {
       next: async () => {
-        return files.shift()
+        return files.splice(0, 100)
       },
       close: async () => {}
     }
@@ -189,8 +189,7 @@ export class MemRawDBAdapter implements RawDBAdapter {
     }
     return {
       next: async () => {
-        const doc = result.shift()
-        return doc
+        return result.splice(0, 50)
       },
       close: async () => {}
     }
diff --git a/server/core/src/adapter.ts b/server/core/src/adapter.ts
index 2c14a685dd..5d25a954b6 100644
--- a/server/core/src/adapter.ts
+++ b/server/core/src/adapter.ts
@@ -57,7 +57,7 @@ export interface DomainHelper {
 }
 
 export interface RawDBAdapterStream<T extends Doc> {
-  next: () => Promise<T | undefined>
+  next: () => Promise<T[]>
   close: () => Promise<void>
 }
 
diff --git a/server/core/src/mem.ts b/server/core/src/mem.ts
index 60fa7c117a..8b615044d6 100644
--- a/server/core/src/mem.ts
+++ b/server/core/src/mem.ts
@@ -86,7 +86,7 @@ export class DummyDbAdapter implements DbAdapter {
 
   find (ctx: MeasureContext, domain: Domain): StorageIterator {
     return {
-      next: async () => undefined,
+      next: async () => [],
       close: async () => {}
     }
   }
diff --git a/server/core/src/server/aggregator.ts b/server/core/src/server/aggregator.ts
index d000c7a2b0..e176aeb6b0 100644
--- a/server/core/src/server/aggregator.ts
+++ b/server/core/src/server/aggregator.ts
@@ -99,31 +99,20 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
   find (ctx: MeasureContext, workspaceId: WorkspaceId): StorageIterator {
     const storageIterator = this.makeStorageIterator(ctx, workspaceId)
 
-    let buffer: ListBlobResult[] = []
-
     return {
-      next: async (ctx) => {
-        const docInfo = await storageIterator.next()
-        if (docInfo !== undefined) {
-          buffer.push(docInfo)
+      next: async () => {
+        const docInfos = await storageIterator.next()
+        if (docInfos.length > 0) {
+          await this.doSyncDocs(ctx, workspaceId, docInfos)
         }
-        if (buffer.length > 50) {
-          await this.doSyncDocs(ctx, workspaceId, buffer)
 
-          buffer = []
-        }
-        if (docInfo !== undefined) {
-          return {
-            hash: docInfo.etag,
-            id: docInfo._id,
-            size: docInfo.size
-          }
-        }
+        return docInfos.map((it) => ({
+          hash: it.etag,
+          id: it._id,
+          size: it.size
+        }))
       },
       close: async (ctx) => {
-        if (buffer.length > 0) {
-          await this.doSyncDocs(ctx, workspaceId, buffer)
-        }
         await storageIterator.close()
       }
     }
@@ -134,22 +123,21 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
     let iterator: BlobStorageIterator | undefined
     return {
       next: async () => {
-        while (true) {
-          if (iterator === undefined && adapters.length > 0) {
-            iterator = await (adapters.shift() as StorageAdapter).listStream(ctx, workspaceId)
-          }
-          if (iterator === undefined) {
-            return undefined
-          }
-          const docInfo = await iterator.next()
-          if (docInfo !== undefined) {
-            // We need to check if our stored version is fine
-            return docInfo
-          } else {
-            // We need to take next adapter
-            await iterator.close()
-            iterator = undefined
-          }
+        if (iterator === undefined && adapters.length > 0) {
+          iterator = await (adapters.shift() as StorageAdapter).listStream(ctx, workspaceId)
+        }
+        if (iterator === undefined) {
+          return []
+        }
+        const docInfos = await iterator.next()
+        if (docInfos.length > 0) {
+          // We need to check if our stored version is fine
+          return docInfos
+        } else {
+          // We need to take next adapter
+          await iterator.close()
+          iterator = undefined
+          return []
         }
       },
       close: async () => {
@@ -227,7 +215,7 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
   async listStream (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<BlobStorageIterator> {
     const data = await this.dbAdapter.findStream<Blob>(ctx, workspaceId, DOMAIN_BLOB, {})
     return {
-      next: async (): Promise<ListBlobResult | undefined> => {
+      next: async (): Promise<ListBlobResult[]> => {
         return await data.next()
       },
       close: async () => {
diff --git a/server/core/src/storage.ts b/server/core/src/storage.ts
index e252bd4bd9..8d436aa733 100644
--- a/server/core/src/storage.ts
+++ b/server/core/src/storage.ts
@@ -9,6 +9,7 @@ import {
   type StorageIterator,
   type WorkspaceId
 } from '@hcengineering/core'
+import { estimateDocSize } from './utils'
 
 export * from '@hcengineering/storage'
 
@@ -19,7 +20,7 @@ export function getBucketId (workspaceId: WorkspaceId): string {
   return toWorkspaceString(workspaceId)
 }
 
-const chunkSize = 2 * 1024 * 1024
+const chunkSize = 512 * 1024
 
 /**
  * @public
@@ -70,14 +71,15 @@ export class BackupClientOps {
       const docs: DocInfo[] = []
 
       while (size < chunkSize) {
-        const doc = await chunk.iterator.next(ctx)
-        if (doc === undefined) {
+        const _docs = await chunk.iterator.next(ctx)
+        if (_docs.length === 0) {
           chunk.finished = true
           break
         }
-
-        size += doc.size
-        docs.push(doc)
+        for (const doc of _docs) {
+          size += estimateDocSize(doc)
+          docs.push(doc)
+        }
       }
 
       return {
diff --git a/server/minio/src/index.ts b/server/minio/src/index.ts
index f0987da53f..1a56fba05c 100644
--- a/server/minio/src/index.ts
+++ b/server/minio/src/index.ts
@@ -192,7 +192,7 @@ export class MinioService implements StorageAdapter {
 
     const rootPrefix = this.rootPrefix(workspaceId)
     return {
-      next: async (): Promise<ListBlobResult | undefined> => {
+      next: async (): Promise<ListBlobResult[]> => {
         try {
           if (stream === undefined && !done) {
             const rprefix = rootPrefix ?? ''
@@ -227,7 +227,7 @@ export class MinioService implements StorageAdapter {
                 })
               }
               onNext()
-              if (buffer.length > 5) {
+              if (buffer.length > 100) {
                 stream?.pause()
               }
             })
@@ -236,24 +236,24 @@ export class MinioService implements StorageAdapter {
           const msg = (err?.message as string) ?? ''
           if (msg.includes('Invalid bucket name') || msg.includes('The specified bucket does not exist')) {
             hasMore = false
-            return
+            return []
           }
           error = err
         }
 
         if (buffer.length > 0) {
-          return buffer.shift()
+          return buffer.splice(0, 50)
         }
         if (!hasMore) {
-          return undefined
+          return []
         }
-        return await new Promise<ListBlobResult | undefined>((resolve, reject) => {
+        return await new Promise<ListBlobResult[]>((resolve, reject) => {
           onNext = () => {
             if (error != null) {
               reject(error)
             }
             onNext = () => {}
-            resolve(buffer.shift())
+            resolve(buffer.splice(0, 50))
           }
           stream?.resume()
         })
diff --git a/server/mongo/src/rawAdapter.ts b/server/mongo/src/rawAdapter.ts
index e1b0a1135d..f677e4eda5 100644
--- a/server/mongo/src/rawAdapter.ts
+++ b/server/mongo/src/rawAdapter.ts
@@ -105,7 +105,17 @@ export function createRawMongoDBAdapter (url: string): RawDBAdapter {
       const { cursor } = await getCursor(workspace, domain, query, options)
 
       return {
-        next: async () => (await cursor.next()) ?? undefined,
+        next: async () => {
+          const result: T[] = []
+          const doc = await cursor.next()
+          if (doc != null) {
+            result.push(doc)
+          }
+          if (cursor.bufferedCount() > 0) {
+            result.push(...cursor.readBufferedDocuments())
+          }
+          return result
+        },
         close: async () => {
           await cursor.close()
         }
diff --git a/server/mongo/src/storage.ts b/server/mongo/src/storage.ts
index 6e5a2d733c..48b982d49b 100644
--- a/server/mongo/src/storage.ts
+++ b/server/mongo/src/storage.ts
@@ -16,7 +16,6 @@
 import core, {
   DOMAIN_MODEL,
   DOMAIN_TX,
-  type Iterator,
   SortingOrder,
   TxProcessor,
   addOperation,
@@ -30,6 +29,7 @@ import core, {
   type AttachedDoc,
   type Class,
   type Doc,
+  type DocInfo,
   type DocumentQuery,
   type DocumentUpdate,
   type Domain,
@@ -38,6 +38,7 @@ import core, {
   type FindOptions,
   type FindResult,
   type Hierarchy,
+  type Iterator,
   type Lookup,
   type MeasureContext,
   type Mixin,
@@ -135,7 +136,7 @@ export async function toArray<T> (cursor: AbstractCursor<T>): Promise<T[]> {
 }
 
 export interface DbAdapterOptions {
-  calculateHash?: (doc: Doc) => string
+  calculateHash?: (doc: Doc) => { digest: string, size: number }
 }
 
 abstract class MongoAdapterBase implements DbAdapter {
@@ -1034,44 +1035,17 @@ abstract class MongoAdapterBase implements DbAdapter {
           iterator = coll.find({ '%hash%': { $in: ['', null] } })
           d = await ctx.with('next', { mode }, async () => await iterator.next())
         }
-        if (d == null) {
-          return undefined
+        const result: DocInfo[] = []
+        if (d != null) {
+          result.push(this.toDocInfo(d, bulkUpdate))
         }
-        let digest: string | null = (d as any)['%hash%']
-        if ('%hash%' in d) {
-          delete d['%hash%']
-        }
-        const pos = (digest ?? '').indexOf('|')
-        if (digest == null || digest === '') {
-          const cs = ctx.newChild('calc-size', {})
-          const size = estimateDocSize(d)
-          cs.end()
-
-          if (this.options?.calculateHash !== undefined) {
-            digest = this.options.calculateHash(d)
-          } else {
-            const hash = createHash('sha256')
-            updateHashForDoc(hash, d)
-            digest = hash.digest('base64')
-          }
-
-          bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
-
-          await ctx.with('flush', {}, async () => {
-            await flush()
-          })
-          return {
-            id: d._id,
-            hash: digest,
-            size
-          }
-        } else {
-          return {
-            id: d._id,
-            hash: digest.slice(0, pos),
-            size: parseInt(digest.slice(pos + 1), 16)
-          }
+        if (iterator.bufferedCount() > 0) {
+          result.push(...iterator.readBufferedDocuments().map((it) => this.toDocInfo(it, bulkUpdate)))
         }
+        await ctx.with('flush', {}, async () => {
+          await flush()
+        })
+        return result
       },
       close: async () => {
         await ctx.with('flush', {}, async () => {
@@ -1085,6 +1059,38 @@ abstract class MongoAdapterBase implements DbAdapter {
     }
   }
 
+  private toDocInfo (d: Doc, bulkUpdate: Map<Ref<Doc>, string>): DocInfo {
+    let digest: string | null = (d as any)['%hash%']
+    if ('%hash%' in d) {
+      delete d['%hash%']
+    }
+    const pos = (digest ?? '').indexOf('|')
+    if (digest == null || digest === '') {
+      let size = estimateDocSize(d)
+
+      if (this.options?.calculateHash !== undefined) {
+        ;({ digest, size } = this.options.calculateHash(d))
+      } else {
+        const hash = createHash('sha256')
+        updateHashForDoc(hash, d)
+        digest = hash.digest('base64')
+      }
+
+      bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
+      return {
+        id: d._id,
+        hash: digest,
+        size
+      }
+    } else {
+      return {
+        id: d._id,
+        hash: digest.slice(0, pos),
+        size: parseInt(digest.slice(pos + 1), 16)
+      }
+    }
+  }
+
   async load (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
     return await ctx.with('load', { domain }, async () => {
       if (docs.length === 0) {
diff --git a/server/mongo/src/utils.ts b/server/mongo/src/utils.ts
index 8f555a3ff8..616e8dab30 100644
--- a/server/mongo/src/utils.ts
+++ b/server/mongo/src/utils.ts
@@ -23,7 +23,7 @@ import {
 } from '@hcengineering/core'
 import { PlatformError, unknownStatus } from '@hcengineering/platform'
 import { type DomainHelperOperations } from '@hcengineering/server-core'
-import { MongoClient, type Collection, type Db, type Document, type MongoClientOptions } from 'mongodb'
+import { MongoClient, type Collection, type Db, type Document } from 'mongodb'
 
 const connections = new Map<string, MongoClientReferenceImpl>()
 
@@ -121,31 +121,20 @@ export class ClientRef implements MongoClientReference {
  * Initialize a workspace connection to DB
  * @public
  */
-export function getMongoClient (uri: string, options?: MongoClientOptions): MongoClientReference {
+export function getMongoClient (uri: string): MongoClientReference {
   const extraOptions = JSON.parse(process.env.MONGO_OPTIONS ?? '{}')
-  const key = `${uri}${process.env.MONGO_OPTIONS ?? '{}'}_${JSON.stringify(options ?? {})}`
+  const key = `${uri}${process.env.MONGO_OPTIONS ?? '{}'}`
   let existing = connections.get(key)
 
-  const allOptions: MongoClientOptions = {
-    ...options,
-    ...extraOptions
-  }
-
-  // Make poll size stable
-  if (allOptions.maxPoolSize !== undefined) {
-    allOptions.minPoolSize = allOptions.maxPoolSize
-  }
-  allOptions.monitorCommands = false
-  allOptions.noDelay = true
-
   // If not created or closed
   if (existing === undefined) {
     existing = new MongoClientReferenceImpl(
       MongoClient.connect(uri, {
+        retryReads: true,
         appName: 'transactor',
         enableUtf8Validation: false,
 
-        ...allOptions
+        ...extraOptions
       }),
       () => {
         connections.delete(key)
diff --git a/server/postgres/src/storage.ts b/server/postgres/src/storage.ts
index 5cb5eeceb1..7f50259e79 100644
--- a/server/postgres/src/storage.ts
+++ b/server/postgres/src/storage.ts
@@ -18,6 +18,7 @@ import core, {
   type AttachedDoc,
   type Class,
   type Doc,
+  type DocInfo,
   type DocumentQuery,
   type DocumentUpdate,
   type Domain,
@@ -941,12 +942,12 @@ abstract class PostgresAdapterBase implements DbAdapter {
       )
     }
 
-    const next = async (): Promise<Doc | null> => {
-      const result = await client.query(`FETCH 1 FROM ${cursorName}`)
+    const next = async (limit: number): Promise<Doc[]> => {
+      const result = await client.query(`FETCH ${limit} FROM ${cursorName}`)
       if (result.rows.length === 0) {
-        return null
+        return []
       }
-      return result.rows[0] !== undefined ? parseDoc(result.rows[0]) : null
+      return result.rows.filter((it) => it != null).map((it) => parseDoc(it))
     }
 
     const flush = async (flush = false): Promise<void> => {
@@ -975,47 +976,51 @@ abstract class PostgresAdapterBase implements DbAdapter {
           await init('_id, data', "data ->> '%hash%' IS NOT NULL AND data ->> '%hash%' <> ''")
           initialized = true
         }
-        let d = await ctx.with('next', { mode }, async () => await next())
-        if (d == null && mode === 'hashed') {
+        let docs = await ctx.with('next', { mode }, async () => await next(50))
+        if (docs.length === 0 && mode === 'hashed') {
           await close(cursorName)
           mode = 'non_hashed'
           await init('*', "data ->> '%hash%' IS NULL OR data ->> '%hash%' = ''")
-          d = await ctx.with('next', { mode }, async () => await next())
+          docs = await ctx.with('next', { mode }, async () => await next(50))
         }
-        if (d == null) {
-          return undefined
+        if (docs.length === 0) {
+          return []
         }
-        let digest: string | null = (d as any)['%hash%']
-        if ('%hash%' in d) {
-          delete d['%hash%']
-        }
-        const pos = (digest ?? '').indexOf('|')
-        if (digest == null || digest === '') {
-          const cs = ctx.newChild('calc-size', {})
-          const size = estimateDocSize(d)
-          cs.end()
-
-          const hash = createHash('sha256')
-          updateHashForDoc(hash, d)
-          digest = hash.digest('base64')
-
-          bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
-
-          await ctx.with('flush', {}, async () => {
-            await flush()
-          })
-          return {
-            id: d._id,
-            hash: digest,
-            size
+        const result: DocInfo[] = []
+        for (const d of docs) {
+          let digest: string | null = (d as any)['%hash%']
+          if ('%hash%' in d) {
+            delete d['%hash%']
           }
-        } else {
-          return {
-            id: d._id,
-            hash: digest.slice(0, pos),
-            size: parseInt(digest.slice(pos + 1), 16)
+          const pos = (digest ?? '').indexOf('|')
+          if (digest == null || digest === '') {
+            const cs = ctx.newChild('calc-size', {})
+            const size = estimateDocSize(d)
+            cs.end()
+
+            const hash = createHash('sha256')
+            updateHashForDoc(hash, d)
+            digest = hash.digest('base64')
+
+            bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
+
+            await ctx.with('flush', {}, async () => {
+              await flush()
+            })
+            result.push({
+              id: d._id,
+              hash: digest,
+              size
+            })
+          } else {
+            result.push({
+              id: d._id,
+              hash: digest.slice(0, pos),
+              size: parseInt(digest.slice(pos + 1), 16)
+            })
           }
         }
+        return result
       },
       close: async () => {
         await ctx.with('flush', {}, async () => {
diff --git a/server/s3/src/index.ts b/server/s3/src/index.ts
index cc2c435481..1314ebd98e 100644
--- a/server/s3/src/index.ts
+++ b/server/s3/src/index.ts
@@ -239,9 +239,9 @@ export class S3Service implements StorageAdapter {
 
     const rootPrefix = this.rootPrefix(workspaceId)
     return {
-      next: async (): Promise<ListBlobResult | undefined> => {
+      next: async (): Promise<ListBlobResult[]> => {
         try {
-          if (hasMore && buffer.length === 0) {
+          while (hasMore && buffer.length < 50) {
             const res = await this.client.listObjectsV2({
               Bucket: this.getBucketId(workspaceId),
               Prefix: rootPrefix ?? '',
@@ -271,12 +271,7 @@ export class S3Service implements StorageAdapter {
         } catch (err: any) {
           ctx.error('Failed to get list', { error: err, workspaceId: workspaceId.name })
         }
-        if (buffer.length > 0) {
-          return buffer.shift()
-        }
-        if (!hasMore) {
-          return undefined
-        }
+        return buffer.splice(0, 50)
       },
       close: async () => {}
     }
diff --git a/server/server-storage/src/blobStorage.ts b/server/server-storage/src/blobStorage.ts
index 5f9202d98e..a44d147ad0 100644
--- a/server/server-storage/src/blobStorage.ts
+++ b/server/server-storage/src/blobStorage.ts
@@ -170,9 +170,9 @@ export async function createStorageDataAdapter (
     calculateHash: (d) => {
       const blob = d as Blob
       if (storageEx?.adapters !== undefined && storageEx.adapters.get(blob.provider) === undefined) {
-        return blob.etag + '_' + storageEx.defaultAdapter // Replace tag to be able to move to new provider
+        return { digest: blob.etag + '_' + storageEx.defaultAdapter, size: blob.size }
       }
-      return blob.etag
+      return { digest: blob.etag, size: blob.size }
     }
   })
   return new StorageBlobAdapter(workspaceId, storage, ctx, blobAdapter)

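Illustrative sketch (TypeScript, not part of the patch series): patch 2 changes the iterator contracts from one document at a time (Promise<DocInfo | undefined>) to bulk batches (Promise<DocInfo[]>), with an empty array marking the end of the stream. A minimal consumer of the new BlobStorageIterator shape follows, with ListBlobResult reduced to the fields used here; field names are taken from the diff above, the helper name is made up for illustration.

    interface ListBlobResult {
      _id: string
      size: number
    }

    interface BlobStorageIterator {
      next: () => Promise<ListBlobResult[]>
      close: () => Promise<void>
    }

    async function collectBlobIds (iterator: BlobStorageIterator): Promise<string[]> {
      const ids: string[] = []
      while (true) {
        const batch = await iterator.next() // bulk form: several blobs per call
        if (batch.length === 0) break       // empty batch signals completion
        for (const blob of batch) {
          ids.push(blob._id)
        }
      }
      await iterator.close()
      return ids
    }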
From 5fcc42f9824a4fb38249bad8878f1fb2054b6ee0 Mon Sep 17 00:00:00 2001
From: Andrey Sobolev <haiodo@gmail.com>
Date: Tue, 17 Sep 2024 02:28:45 +0700
Subject: [PATCH 3/6] Revert "UBERF-8122: Fix backup service"

This reverts commit b06f433babd8c5dc89aa4e1f63599d6b1b0ff2a3.

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
---
 .vscode/launch.json                      |   7 +-
 dev/tool/package.json                    |   3 +-
 dev/tool/src/clean.ts                    |  29 +++---
 dev/tool/src/storage.ts                  | 125 +++++++++++------------
 packages/core/src/server.ts              |   2 +-
 packages/storage/src/index.ts            |  26 +++--
 pods/server/Dockerfile                   |   2 +
 server/backup/src/backup.ts              |  69 +++++++------
 server/core/src/__tests__/memAdapters.ts |   5 +-
 server/core/src/adapter.ts               |   2 +-
 server/core/src/mem.ts                   |   2 +-
 server/core/src/server/aggregator.ts     |  66 +++++++-----
 server/core/src/storage.ts               |  14 ++-
 server/minio/src/index.ts                |  14 +--
 server/mongo/src/rawAdapter.ts           |  12 +--
 server/mongo/src/storage.ts              |  82 +++++++--------
 server/mongo/src/utils.ts                |  21 +++-
 server/postgres/src/storage.ts           |  79 +++++++-------
 server/s3/src/index.ts                   |  11 +-
 server/server-storage/src/blobStorage.ts |   4 +-
 20 files changed, 286 insertions(+), 289 deletions(-)

diff --git a/.vscode/launch.json b/.vscode/launch.json
index ac8ccac484..2e571562ed 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -221,7 +221,7 @@
       "name": "Debug backup tool",
       "type": "node",
       "request": "launch",
-      "args": ["src/index.ts", "backup", "../../../dump/platform2", "platform"],
+      "args": ["src/index.ts", "backup", "../../../dump/alex-staff-agency2", "alex-staff-agency"],
       "env": {
         "MINIO_ACCESS_KEY": "minioadmin",
         "MINIO_SECRET_KEY": "minioadmin",
@@ -234,10 +234,7 @@
       "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
       "sourceMaps": true,
       "cwd": "${workspaceRoot}/dev/tool",
-      "protocol": "inspector",
-      "outputCapture": "std",
-      "runtimeVersion": "20",
-      "showAsyncStacks": true,
+      "protocol": "inspector"
     },
     {
       "name": "Debug tool upgrade",
diff --git a/dev/tool/package.json b/dev/tool/package.json
index bd62efdc73..476c331442 100644
--- a/dev/tool/package.json
+++ b/dev/tool/package.json
@@ -14,13 +14,12 @@
     "_phase:bundle": "rushx bundle",
     "_phase:docker-build": "rushx docker:build",
     "_phase:docker-staging": "rushx docker:staging",
-    "bundle": "mkdir -p bundle && esbuild src/__start.ts --bundle --keep-names --sourcemap=external --platform=node --define:process.env.MODEL_VERSION=$(node ../../common/scripts/show_version.js) --define:process.env.GIT_REVISION=$(../../common/scripts/git_version.sh) --log-level=error --outfile=bundle/bundle.js",
+    "bundle": "mkdir -p bundle && esbuild src/__start.ts --bundle --minify --platform=node --define:process.env.MODEL_VERSION=$(node ../../common/scripts/show_version.js) --define:process.env.GIT_REVISION=$(../../common/scripts/git_version.sh) > bundle/bundle.js",
     "docker:build": "../../common/scripts/docker_build.sh hardcoreeng/tool",
     "docker:tbuild": "docker build -t hardcoreeng/tool . --platform=linux/amd64 && ../../common/scripts/docker_tag_push.sh hardcoreeng/tool",
     "docker:staging": "../../common/scripts/docker_tag.sh hardcoreeng/tool staging",
     "docker:push": "../../common/scripts/docker_tag.sh hardcoreeng/tool",
     "run-local": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost MONGO_URL=mongodb://localhost:27017 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --max-old-space-size=18000 ./bundle/bundle.js",
-    "run-local-brk": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost MONGO_URL=mongodb://localhost:27017 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --inspect-brk --enable-source-maps --max-old-space-size=18000 ./bundle/bundle.js",
     "run": "rush bundle --to @hcengineering/tool >/dev/null && cross-env node --max-old-space-size=8000 ./bundle/bundle.js",
     "upgrade": "rushx run-local upgrade",
     "format": "format src",
diff --git a/dev/tool/src/clean.ts b/dev/tool/src/clean.ts
index cd199eec1e..612dcb0a92 100644
--- a/dev/tool/src/clean.ts
+++ b/dev/tool/src/clean.ts
@@ -104,15 +104,12 @@ export async function cleanWorkspace (
     const minioList = await storageAdapter.listStream(ctx, workspaceId)
     const toClean: string[] = []
     while (true) {
-      const mvFiles = await minioList.next()
-      if (mvFiles.length === 0) {
+      const mv = await minioList.next()
+      if (mv === undefined) {
         break
       }
-
-      for (const mv of mvFiles) {
-        if (!files.has(mv._id)) {
-          toClean.push(mv._id)
-        }
+      if (!files.has(mv._id)) {
+        toClean.push(mv._id)
       }
     }
     await storageAdapter.remove(ctx, workspaceId, toClean)
@@ -195,18 +192,16 @@ export async function fixMinioBW (
   const list = await storageService.listStream(ctx, workspaceId)
   let removed = 0
   while (true) {
-    const objs = await list.next()
-    if (objs.length === 0) {
+    const obj = await list.next()
+    if (obj === undefined) {
       break
     }
-    for (const obj of objs) {
-      if (obj.modifiedOn < from) continue
-      if ((obj._id as string).includes('%preview%')) {
-        await storageService.remove(ctx, workspaceId, [obj._id])
-        removed++
-        if (removed % 100 === 0) {
-          console.log('removed: ', removed)
-        }
+    if (obj.modifiedOn < from) continue
+    if ((obj._id as string).includes('%preview%')) {
+      await storageService.remove(ctx, workspaceId, [obj._id])
+      removed++
+      if (removed % 100 === 0) {
+        console.log('removed: ', removed)
       }
     }
   }
diff --git a/dev/tool/src/storage.ts b/dev/tool/src/storage.ts
index e5a305a910..1dc0f55d96 100644
--- a/dev/tool/src/storage.ts
+++ b/dev/tool/src/storage.ts
@@ -40,22 +40,20 @@ export async function syncFiles (
       const iterator = await adapter.listStream(ctx, workspaceId)
       try {
         while (true) {
-          const dataBulk = await iterator.next()
-          if (dataBulk.length === 0) break
+          const data = await iterator.next()
+          if (data === undefined) break
 
-          for (const data of dataBulk) {
-            const blob = await exAdapter.stat(ctx, workspaceId, data._id)
-            if (blob !== undefined) continue
+          const blob = await exAdapter.stat(ctx, workspaceId, data._id)
+          if (blob !== undefined) continue
 
-            await exAdapter.syncBlobFromStorage(ctx, workspaceId, data._id, name)
+          await exAdapter.syncBlobFromStorage(ctx, workspaceId, data._id, name)
 
-            count += 1
-            if (count % 100 === 0) {
-              const duration = Date.now() - time
-              time = Date.now()
+          count += 1
+          if (count % 100 === 0) {
+            const duration = Date.now() - time
+            time = Date.now()
 
-              console.log('...processed', count, Math.round(duration / 1000) + 's')
-            }
+            console.log('...processed', count, Math.round(duration / 1000) + 's')
           }
         }
         console.log('processed', count)
@@ -114,67 +112,64 @@ async function processAdapter (
   const iterator = await source.listStream(ctx, workspaceId)
   try {
     while (true) {
-      const dataBulk = await iterator.next()
-      if (dataBulk.length === 0) break
+      const data = await iterator.next()
+      if (data === undefined) break
 
-      for (const data of dataBulk) {
-        const blob =
-          (await exAdapter.stat(ctx, workspaceId, data._id)) ?? (await source.stat(ctx, workspaceId, data._id))
+      const blob = (await exAdapter.stat(ctx, workspaceId, data._id)) ?? (await source.stat(ctx, workspaceId, data._id))
 
-        if (blob === undefined) {
-          console.error('blob not found', data._id)
-          continue
+      if (blob === undefined) {
+        console.error('blob not found', data._id)
+        continue
+      }
+
+      if (blob.provider !== exAdapter.defaultAdapter) {
+        if (blob.size <= params.blobSizeLimitMb * 1024 * 1024) {
+          await rateLimiter.exec(async () => {
+            try {
+              await retryOnFailure(
+                ctx,
+                5,
+                async () => {
+                  await processFile(ctx, source, params.move ? exAdapter : target, workspaceId, blob)
+                },
+                50
+              )
+              movedCnt += 1
+              movedBytes += blob.size
+              batchBytes += blob.size
+            } catch (err) {
+              console.error('failed to process blob', data._id, err)
+            }
+          })
+        } else {
+          skippedCnt += 1
+          console.log('skipping large blob', data._id, Math.round(blob.size / 1024 / 1024))
         }
+      }
 
-        if (blob.provider !== exAdapter.defaultAdapter) {
-          if (blob.size <= params.blobSizeLimitMb * 1024 * 1024) {
-            await rateLimiter.exec(async () => {
-              try {
-                await retryOnFailure(
-                  ctx,
-                  5,
-                  async () => {
-                    await processFile(ctx, source, params.move ? exAdapter : target, workspaceId, blob)
-                  },
-                  50
-                )
-                movedCnt += 1
-                movedBytes += blob.size
-                batchBytes += blob.size
-              } catch (err) {
-                console.error('failed to process blob', data._id, err)
-              }
-            })
-          } else {
-            skippedCnt += 1
-            console.log('skipping large blob', data._id, Math.round(blob.size / 1024 / 1024))
-          }
-        }
+      processedCnt += 1
+      processedBytes += blob.size
 
-        processedCnt += 1
-        processedBytes += blob.size
+      if (processedCnt % 100 === 0) {
+        await rateLimiter.waitProcessing()
 
-        if (processedCnt % 100 === 0) {
-          await rateLimiter.waitProcessing()
+        const duration = Date.now() - time
 
-          const duration = Date.now() - time
+        console.log(
+          '...processed',
+          processedCnt,
+          Math.round(processedBytes / 1024 / 1024) + 'MB',
+          'moved',
+          movedCnt,
+          Math.round(movedBytes / 1024 / 1024) + 'MB',
+          '+' + Math.round(batchBytes / 1024 / 1024) + 'MB',
+          'skipped',
+          skippedCnt,
+          Math.round(duration / 1000) + 's'
+        )
 
-          console.log(
-            '...processed',
-            processedCnt,
-            Math.round(processedBytes / 1024 / 1024) + 'MB',
-            'moved',
-            movedCnt,
-            Math.round(movedBytes / 1024 / 1024) + 'MB',
-            '+' + Math.round(batchBytes / 1024 / 1024) + 'MB',
-            'skipped',
-            skippedCnt,
-            Math.round(duration / 1000) + 's'
-          )
-
-          batchBytes = 0
-          time = Date.now()
-        }
+        batchBytes = 0
+        time = Date.now()
       }
     }
 
diff --git a/packages/core/src/server.ts b/packages/core/src/server.ts
index 8ae4596a6e..b6c86d422f 100644
--- a/packages/core/src/server.ts
+++ b/packages/core/src/server.ts
@@ -31,7 +31,7 @@ export interface DocInfo {
  * @public
  */
 export interface StorageIterator {
-  next: (ctx: MeasureContext) => Promise<DocInfo[]>
+  next: (ctx: MeasureContext) => Promise<DocInfo | undefined>
   close: (ctx: MeasureContext) => Promise<void>
 }
 
diff --git a/packages/storage/src/index.ts b/packages/storage/src/index.ts
index 5a1a850d42..95a0e97cc0 100644
--- a/packages/storage/src/index.ts
+++ b/packages/storage/src/index.ts
@@ -24,7 +24,7 @@ export interface UploadedObjectInfo {
 }
 
 export interface BlobStorageIterator {
-  next: () => Promise<ListBlobResult[]>
+  next: () => Promise<ListBlobResult | undefined>
   close: () => Promise<void>
 }
 
@@ -99,7 +99,7 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
 
   find (ctx: MeasureContext, workspaceId: WorkspaceId): StorageIterator {
     return {
-      next: async (ctx) => [],
+      next: async (ctx) => undefined,
       close: async (ctx) => {}
     }
   }
@@ -120,8 +120,8 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
 
   async listStream (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<BlobStorageIterator> {
     return {
-      next: async (): Promise<ListBlobResult[]> => {
-        return []
+      next: async (): Promise<ListBlobResult | undefined> => {
+        return undefined
       },
       close: async () => {}
     }
@@ -179,16 +179,14 @@ export async function removeAllObjects (
   const iterator = await storage.listStream(ctx, workspaceId)
   let bulk: string[] = []
   while (true) {
-    const objs = await iterator.next()
-    if (objs.length === 0) {
+    const obj = await iterator.next()
+    if (obj === undefined) {
       break
     }
-    for (const obj of objs) {
-      bulk.push(obj.storageId)
-      if (bulk.length > 50) {
-        await storage.remove(ctx, workspaceId, bulk)
-        bulk = []
-      }
+    bulk.push(obj.storageId)
+    if (bulk.length > 50) {
+      await storage.remove(ctx, workspaceId, bulk)
+      bulk = []
     }
   }
   if (bulk.length > 0) {
@@ -208,10 +206,10 @@ export async function objectsToArray (
   const bulk: ListBlobResult[] = []
   while (true) {
     const obj = await iterator.next()
-    if (obj.length === 0) {
+    if (obj === undefined) {
       break
     }
-    bulk.push(...obj)
+    bulk.push(obj)
   }
   await iterator.close()
   return bulk
diff --git a/pods/server/Dockerfile b/pods/server/Dockerfile
index 938ff0edc2..555b5c694d 100644
--- a/pods/server/Dockerfile
+++ b/pods/server/Dockerfile
@@ -2,6 +2,7 @@ FROM node:20
 
 WORKDIR /usr/src/app
 RUN npm install --ignore-scripts=false --verbose bufferutil utf-8-validate @mongodb-js/zstd snappy msgpackr msgpackr-extract --unsafe-perm
+RUN npm install --ignore-scripts=false --verbose uNetworking/uWebSockets.js#v20.47.0
 
 RUN apt-get update
 RUN apt-get install libjemalloc2
@@ -9,6 +10,7 @@ RUN apt-get install libjemalloc2
 ENV LD_PRELOAD=libjemalloc.so.2
 ENV MALLOC_CONF=dirty_decay_ms:1000,narenas:2,background_thread:true
 
+RUN mv node_modules/uWebSockets.js/*.node .
 COPY bundle/bundle.js ./
 COPY bundle/bundle.js.map ./
 
diff --git a/server/backup/src/backup.ts b/server/backup/src/backup.ts
index f464372c88..d92f368002 100644
--- a/server/backup/src/backup.ts
+++ b/server/backup/src/backup.ts
@@ -41,6 +41,7 @@ import { BlobClient, createClient } from '@hcengineering/server-client'
 import { fullTextPushStagePrefix, type StorageAdapter } from '@hcengineering/server-core'
 import { generateToken } from '@hcengineering/server-token'
 import { connect } from '@hcengineering/server-tool'
+import { mkdtemp, writeFile } from 'node:fs/promises'
 import { PassThrough } from 'node:stream'
 import { createGzip } from 'node:zlib'
 import { join } from 'path'
@@ -487,16 +488,6 @@ async function cleanDomain (ctx: MeasureContext, connection: CoreClient & Backup
   }
 }
 
-function doTrimHash (s: string | undefined): string {
-  if (s == null) {
-    return ''
-  }
-  if (s.startsWith('"') && s.endsWith('"')) {
-    return s.slice(1, s.length - 1)
-  }
-  return s
-}
-
 /**
  * @public
  */
@@ -535,15 +526,11 @@ export async function backup (
 
   let canceled = false
   let timer: any
-  let ops = 0
 
   if (options.timeout > 0) {
-    timer = setInterval(() => {
-      if (ops === 0) {
-        ctx.error('Timeout during backup', { workspace: workspaceId.name, timeout: options.timeout / 1000 })
-        ops = 0
-        canceled = true
-      }
+    timer = setTimeout(() => {
+      ctx.error('Timeout during backup', { workspace: workspaceId.name, timeout: options.timeout / 1000 })
+      canceled = true
     }, options.timeout)
   }
 
@@ -558,6 +545,8 @@ export async function backup (
   const blobClient = new BlobClient(transactorUrl, token, workspaceId, { storageAdapter: options.storageAdapter })
   ctx.info('starting backup', { workspace: workspaceId.name })
 
+  let tmpDir: string | undefined
+
   try {
     const domains = [
       ...connection
@@ -624,7 +613,6 @@ export async function backup (
       if (size == null || Number.isNaN(size)) {
         return
       }
-      ops++
       downloaded += size
       const newDownloadedMb = Math.round(downloaded / (1024 * 1024))
       const newId = Math.round(newDownloadedMb / 10)
@@ -653,7 +641,6 @@ export async function backup (
         try {
           const currentChunk = await ctx.with('loadChunk', {}, () => connection.loadChunk(domain, idx, options.recheck))
           idx = currentChunk.idx
-          ops++
 
           let needRetrieve: Ref<Doc>[] = []
           let currentNeedRetrieveSize = 0
@@ -669,18 +656,17 @@ export async function backup (
               })
               st = Date.now()
             }
-            const _hash = doTrimHash(hash)
-            const kHash = doTrimHash(digest.get(id as Ref<Doc>))
+            const kHash = digest.get(id as Ref<Doc>)
             if (kHash !== undefined) {
               digest.delete(id as Ref<Doc>)
-              if (kHash !== _hash) {
-                changes.updated.set(id as Ref<Doc>, _hash)
+              if (kHash !== hash) {
+                changes.updated.set(id as Ref<Doc>, hash)
                 needRetrieve.push(id as Ref<Doc>)
                 currentNeedRetrieveSize += size
                 changed++
               }
             } else {
-              changes.added.set(id as Ref<Doc>, _hash)
+              changes.added.set(id as Ref<Doc>, hash)
               needRetrieve.push(id as Ref<Doc>)
               changed++
               currentNeedRetrieveSize += size
@@ -742,13 +728,19 @@ export async function backup (
       }
 
       // Cumulative digest
-      const digest = await ctx.with('load-digest', {}, (ctx) => loadDigest(ctx, storage, backupInfo.snapshots, domain))
+      const digest = await ctx.with(
+        'load-digest',
+        {},
+        async (ctx) => await loadDigest(ctx, storage, backupInfo.snapshots, domain)
+      )
 
       let _pack: Pack | undefined
       let addedDocuments = 0
 
-      let { changed, needRetrieveChunks } = await ctx.with('load-chunks', { domain }, (ctx) =>
-        loadChangesFromServer(ctx, domain, digest, changes)
+      let { changed, needRetrieveChunks } = await ctx.with(
+        'load-chunks',
+        { domain },
+        async (ctx) => await loadChangesFromServer(ctx, domain, digest, changes)
       )
 
       if (needRetrieveChunks.length > 0) {
@@ -769,7 +761,6 @@ export async function backup (
         let docs: Doc[] = []
         try {
           docs = await ctx.with('load-docs', {}, async (ctx) => await connection.loadDocs(domain, needRetrieve))
-          ops++
         } catch (err: any) {
           ctx.error('error loading docs', { domain, err, workspace: workspaceId.name })
           // Put back.
@@ -885,12 +876,16 @@ export async function backup (
 
               const finalBuffer = Buffer.concat(buffers)
               if (finalBuffer.length !== blob.size) {
+                tmpDir = tmpDir ?? (await mkdtemp('backup', {}))
+                const tmpFile = join(tmpDir, blob._id)
+                await writeFile(tmpFile, finalBuffer)
+                await writeFile(tmpFile + '.json', JSON.stringify(blob, undefined, 2))
                 ctx.error('download blob size mismatch', {
                   _id: blob._id,
                   contentType: blob.contentType,
                   size: blob.size,
-                  bufferSize: finalBuffer.length,
-                  provider: blob.provider
+                  provider: blob.provider,
+                  tempDir: tmpDir
                 })
               }
               _pack.entry({ name: d._id + '.json' }, descrJson, (err) => {
@@ -980,7 +975,7 @@ export async function backup (
     }
     ctx.end()
     if (options.timeout !== -1) {
-      clearInterval(timer)
+      clearTimeout(timer)
     }
   }
 }
@@ -1205,12 +1200,22 @@ export async function restore (
       workspace: workspaceId.name
     })
 
+    const doTrim = (s: string | undefined): string | undefined => {
+      if (s == null) {
+        return s
+      }
+      if (s.startsWith('"') && s.endsWith('"')) {
+        return s.slice(1, s.length - 1)
+      }
+      return s
+    }
+
     // Let's find difference
     const docsToAdd = new Map(
       Array.from(changeset.entries()).filter(
         ([it]) =>
           !serverChangeset.has(it) ||
-          (serverChangeset.has(it) && doTrimHash(serverChangeset.get(it)) !== doTrimHash(changeset.get(it)))
+          (serverChangeset.has(it) && doTrim(serverChangeset.get(it)) !== doTrim(changeset.get(it)))
       )
     )
     const docsToRemove = Array.from(serverChangeset.keys()).filter((it) => !changeset.has(it))
diff --git a/server/core/src/__tests__/memAdapters.ts b/server/core/src/__tests__/memAdapters.ts
index 02060086e2..a3109c6a59 100644
--- a/server/core/src/__tests__/memAdapters.ts
+++ b/server/core/src/__tests__/memAdapters.ts
@@ -56,7 +56,7 @@ export class MemStorageAdapter implements StorageAdapter {
     const files = Array.from(this.files.values()).filter((it) => it.workspace === workspaceId.name)
     return {
       next: async () => {
-        return files.splice(0, 100)
+        return files.shift()
       },
       close: async () => {}
     }
@@ -189,7 +189,8 @@ export class MemRawDBAdapter implements RawDBAdapter {
     }
     return {
       next: async () => {
-        return result.splice(0, 50)
+        const doc = result.shift()
+        return doc
       },
       close: async () => {}
     }
diff --git a/server/core/src/adapter.ts b/server/core/src/adapter.ts
index 5d25a954b6..2c14a685dd 100644
--- a/server/core/src/adapter.ts
+++ b/server/core/src/adapter.ts
@@ -57,7 +57,7 @@ export interface DomainHelper {
 }
 
 export interface RawDBAdapterStream<T extends Doc> {
-  next: () => Promise<T[]>
+  next: () => Promise<T | undefined>
   close: () => Promise<void>
 }
 
diff --git a/server/core/src/mem.ts b/server/core/src/mem.ts
index 8b615044d6..60fa7c117a 100644
--- a/server/core/src/mem.ts
+++ b/server/core/src/mem.ts
@@ -86,7 +86,7 @@ export class DummyDbAdapter implements DbAdapter {
 
   find (ctx: MeasureContext, domain: Domain): StorageIterator {
     return {
-      next: async () => [],
+      next: async () => undefined,
       close: async () => {}
     }
   }
diff --git a/server/core/src/server/aggregator.ts b/server/core/src/server/aggregator.ts
index e176aeb6b0..d000c7a2b0 100644
--- a/server/core/src/server/aggregator.ts
+++ b/server/core/src/server/aggregator.ts
@@ -99,20 +99,31 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
   find (ctx: MeasureContext, workspaceId: WorkspaceId): StorageIterator {
     const storageIterator = this.makeStorageIterator(ctx, workspaceId)
 
-    return {
-      next: async () => {
-        const docInfos = await storageIterator.next()
-        if (docInfos.length > 0) {
-          await this.doSyncDocs(ctx, workspaceId, docInfos)
-        }
+    let buffer: ListBlobResult[] = []
 
-        return docInfos.map((it) => ({
-          hash: it.etag,
-          id: it._id,
-          size: it.size
-        }))
+    return {
+      next: async (ctx) => {
+        const docInfo = await storageIterator.next()
+        if (docInfo !== undefined) {
+          buffer.push(docInfo)
+        }
+        if (buffer.length > 50) {
+          await this.doSyncDocs(ctx, workspaceId, buffer)
+
+          buffer = []
+        }
+        if (docInfo !== undefined) {
+          return {
+            hash: docInfo.etag,
+            id: docInfo._id,
+            size: docInfo.size
+          }
+        }
       },
       close: async (ctx) => {
+        if (buffer.length > 0) {
+          await this.doSyncDocs(ctx, workspaceId, buffer)
+        }
         await storageIterator.close()
       }
     }
@@ -123,21 +134,22 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
     let iterator: BlobStorageIterator | undefined
     return {
       next: async () => {
-        if (iterator === undefined && adapters.length > 0) {
-          iterator = await (adapters.shift() as StorageAdapter).listStream(ctx, workspaceId)
-        }
-        if (iterator === undefined) {
-          return []
-        }
-        const docInfos = await iterator.next()
-        if (docInfos.length > 0) {
-          // We need to check if our stored version is fine
-          return docInfos
-        } else {
-          // We need to take next adapter
-          await iterator.close()
-          iterator = undefined
-          return []
+        while (true) {
+          if (iterator === undefined && adapters.length > 0) {
+            iterator = await (adapters.shift() as StorageAdapter).listStream(ctx, workspaceId)
+          }
+          if (iterator === undefined) {
+            return undefined
+          }
+          const docInfo = await iterator.next()
+          if (docInfo !== undefined) {
+            // We need to check if our stored version is fine
+            return docInfo
+          } else {
+            // We need to take next adapter
+            await iterator.close()
+            iterator = undefined
+          }
         }
       },
       close: async () => {
@@ -215,7 +227,7 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
   async listStream (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<BlobStorageIterator> {
     const data = await this.dbAdapter.findStream<Blob>(ctx, workspaceId, DOMAIN_BLOB, {})
     return {
-      next: async (): Promise<ListBlobResult[]> => {
+      next: async (): Promise<ListBlobResult | undefined> => {
         return await data.next()
       },
       close: async () => {
diff --git a/server/core/src/storage.ts b/server/core/src/storage.ts
index 8d436aa733..e252bd4bd9 100644
--- a/server/core/src/storage.ts
+++ b/server/core/src/storage.ts
@@ -9,7 +9,6 @@ import {
   type StorageIterator,
   type WorkspaceId
 } from '@hcengineering/core'
-import { estimateDocSize } from './utils'
 
 export * from '@hcengineering/storage'
 
@@ -20,7 +19,7 @@ export function getBucketId (workspaceId: WorkspaceId): string {
   return toWorkspaceString(workspaceId)
 }
 
-const chunkSize = 512 * 1024
+const chunkSize = 2 * 1024 * 1024
 
 /**
  * @public
@@ -71,15 +70,14 @@ export class BackupClientOps {
       const docs: DocInfo[] = []
 
       while (size < chunkSize) {
-        const _docs = await chunk.iterator.next(ctx)
-        if (_docs.length === 0) {
+        const doc = await chunk.iterator.next(ctx)
+        if (doc === undefined) {
           chunk.finished = true
           break
         }
-        for (const doc of _docs) {
-          size += estimateDocSize(doc)
-          docs.push(doc)
-        }
+
+        size += doc.size
+        docs.push(doc)
       }
 
       return {
diff --git a/server/minio/src/index.ts b/server/minio/src/index.ts
index 1a56fba05c..f0987da53f 100644
--- a/server/minio/src/index.ts
+++ b/server/minio/src/index.ts
@@ -192,7 +192,7 @@ export class MinioService implements StorageAdapter {
 
     const rootPrefix = this.rootPrefix(workspaceId)
     return {
-      next: async (): Promise<ListBlobResult[]> => {
+      next: async (): Promise<ListBlobResult | undefined> => {
         try {
           if (stream === undefined && !done) {
             const rprefix = rootPrefix ?? ''
@@ -227,7 +227,7 @@ export class MinioService implements StorageAdapter {
                 })
               }
               onNext()
-              if (buffer.length > 100) {
+              if (buffer.length > 5) {
                 stream?.pause()
               }
             })
@@ -236,24 +236,24 @@ export class MinioService implements StorageAdapter {
           const msg = (err?.message as string) ?? ''
           if (msg.includes('Invalid bucket name') || msg.includes('The specified bucket does not exist')) {
             hasMore = false
-            return []
+            return
           }
           error = err
         }
 
         if (buffer.length > 0) {
-          return buffer.splice(0, 50)
+          return buffer.shift()
         }
         if (!hasMore) {
-          return []
+          return undefined
         }
-        return await new Promise<ListBlobResult[]>((resolve, reject) => {
+        return await new Promise<ListBlobResult | undefined>((resolve, reject) => {
           onNext = () => {
             if (error != null) {
               reject(error)
             }
             onNext = () => {}
-            resolve(buffer.splice(0, 50))
+            resolve(buffer.shift())
           }
           stream?.resume()
         })
diff --git a/server/mongo/src/rawAdapter.ts b/server/mongo/src/rawAdapter.ts
index f677e4eda5..e1b0a1135d 100644
--- a/server/mongo/src/rawAdapter.ts
+++ b/server/mongo/src/rawAdapter.ts
@@ -105,17 +105,7 @@ export function createRawMongoDBAdapter (url: string): RawDBAdapter {
       const { cursor } = await getCursor(workspace, domain, query, options)
 
       return {
-        next: async () => {
-          const result: T[] = []
-          const doc = await cursor.next()
-          if (doc != null) {
-            result.push(doc)
-          }
-          if (cursor.bufferedCount() > 0) {
-            result.push(...cursor.readBufferedDocuments())
-          }
-          return result
-        },
+        next: async () => (await cursor.next()) ?? undefined,
         close: async () => {
           await cursor.close()
         }
diff --git a/server/mongo/src/storage.ts b/server/mongo/src/storage.ts
index 48b982d49b..6e5a2d733c 100644
--- a/server/mongo/src/storage.ts
+++ b/server/mongo/src/storage.ts
@@ -16,6 +16,7 @@
 import core, {
   DOMAIN_MODEL,
   DOMAIN_TX,
+  type Iterator,
   SortingOrder,
   TxProcessor,
   addOperation,
@@ -29,7 +30,6 @@ import core, {
   type AttachedDoc,
   type Class,
   type Doc,
-  type DocInfo,
   type DocumentQuery,
   type DocumentUpdate,
   type Domain,
@@ -38,7 +38,6 @@ import core, {
   type FindOptions,
   type FindResult,
   type Hierarchy,
-  type Iterator,
   type Lookup,
   type MeasureContext,
   type Mixin,
@@ -136,7 +135,7 @@ export async function toArray<T> (cursor: AbstractCursor<T>): Promise<T[]> {
 }
 
 export interface DbAdapterOptions {
-  calculateHash?: (doc: Doc) => { digest: string, size: number }
+  calculateHash?: (doc: Doc) => string
 }
 
 abstract class MongoAdapterBase implements DbAdapter {
@@ -1035,17 +1034,44 @@ abstract class MongoAdapterBase implements DbAdapter {
           iterator = coll.find({ '%hash%': { $in: ['', null] } })
           d = await ctx.with('next', { mode }, async () => await iterator.next())
         }
-        const result: DocInfo[] = []
-        if (d != null) {
-          result.push(this.toDocInfo(d, bulkUpdate))
+        if (d == null) {
+          return undefined
         }
-        if (iterator.bufferedCount() > 0) {
-          result.push(...iterator.readBufferedDocuments().map((it) => this.toDocInfo(it, bulkUpdate)))
+        let digest: string | null = (d as any)['%hash%']
+        if ('%hash%' in d) {
+          delete d['%hash%']
+        }
+        const pos = (digest ?? '').indexOf('|')
+        if (digest == null || digest === '') {
+          const cs = ctx.newChild('calc-size', {})
+          const size = estimateDocSize(d)
+          cs.end()
+
+          if (this.options?.calculateHash !== undefined) {
+            digest = this.options.calculateHash(d)
+          } else {
+            const hash = createHash('sha256')
+            updateHashForDoc(hash, d)
+            digest = hash.digest('base64')
+          }
+
+          bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
+
+          await ctx.with('flush', {}, async () => {
+            await flush()
+          })
+          return {
+            id: d._id,
+            hash: digest,
+            size
+          }
+        } else {
+          return {
+            id: d._id,
+            hash: digest.slice(0, pos),
+            size: parseInt(digest.slice(pos + 1), 16)
+          }
         }
-        await ctx.with('flush', {}, async () => {
-          await flush()
-        })
-        return result
       },
       close: async () => {
         await ctx.with('flush', {}, async () => {
@@ -1059,38 +1085,6 @@ abstract class MongoAdapterBase implements DbAdapter {
     }
   }
 
-  private toDocInfo (d: Doc, bulkUpdate: Map<Ref<Doc>, string>): DocInfo {
-    let digest: string | null = (d as any)['%hash%']
-    if ('%hash%' in d) {
-      delete d['%hash%']
-    }
-    const pos = (digest ?? '').indexOf('|')
-    if (digest == null || digest === '') {
-      let size = estimateDocSize(d)
-
-      if (this.options?.calculateHash !== undefined) {
-        ;({ digest, size } = this.options.calculateHash(d))
-      } else {
-        const hash = createHash('sha256')
-        updateHashForDoc(hash, d)
-        digest = hash.digest('base64')
-      }
-
-      bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
-      return {
-        id: d._id,
-        hash: digest,
-        size
-      }
-    } else {
-      return {
-        id: d._id,
-        hash: digest.slice(0, pos),
-        size: parseInt(digest.slice(pos + 1), 16)
-      }
-    }
-  }
-
   async load (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
     return await ctx.with('load', { domain }, async () => {
       if (docs.length === 0) {
diff --git a/server/mongo/src/utils.ts b/server/mongo/src/utils.ts
index 616e8dab30..8f555a3ff8 100644
--- a/server/mongo/src/utils.ts
+++ b/server/mongo/src/utils.ts
@@ -23,7 +23,7 @@ import {
 } from '@hcengineering/core'
 import { PlatformError, unknownStatus } from '@hcengineering/platform'
 import { type DomainHelperOperations } from '@hcengineering/server-core'
-import { MongoClient, type Collection, type Db, type Document } from 'mongodb'
+import { MongoClient, type Collection, type Db, type Document, type MongoClientOptions } from 'mongodb'
 
 const connections = new Map<string, MongoClientReferenceImpl>()
 
@@ -121,20 +121,31 @@ export class ClientRef implements MongoClientReference {
  * Initialize a workspace connection to DB
  * @public
  */
-export function getMongoClient (uri: string): MongoClientReference {
+export function getMongoClient (uri: string, options?: MongoClientOptions): MongoClientReference {
   const extraOptions = JSON.parse(process.env.MONGO_OPTIONS ?? '{}')
-  const key = `${uri}${process.env.MONGO_OPTIONS ?? '{}'}`
+  const key = `${uri}${process.env.MONGO_OPTIONS ?? '{}'}_${JSON.stringify(options ?? {})}`
   let existing = connections.get(key)
 
+  const allOptions: MongoClientOptions = {
+    ...options,
+    ...extraOptions
+  }
+
+  // Make pool size stable
+  if (allOptions.maxPoolSize !== undefined) {
+    allOptions.minPoolSize = allOptions.maxPoolSize
+  }
+  allOptions.monitorCommands = false
+  allOptions.noDelay = true
+
   // If not created or closed
   if (existing === undefined) {
     existing = new MongoClientReferenceImpl(
       MongoClient.connect(uri, {
-        retryReads: true,
         appName: 'transactor',
         enableUtf8Validation: false,
 
-        ...extraOptions
+        ...allOptions
       }),
       () => {
         connections.delete(key)
diff --git a/server/postgres/src/storage.ts b/server/postgres/src/storage.ts
index 7f50259e79..5cb5eeceb1 100644
--- a/server/postgres/src/storage.ts
+++ b/server/postgres/src/storage.ts
@@ -18,7 +18,6 @@ import core, {
   type AttachedDoc,
   type Class,
   type Doc,
-  type DocInfo,
   type DocumentQuery,
   type DocumentUpdate,
   type Domain,
@@ -942,12 +941,12 @@ abstract class PostgresAdapterBase implements DbAdapter {
       )
     }
 
-    const next = async (limit: number): Promise<Doc[]> => {
-      const result = await client.query(`FETCH ${limit} FROM ${cursorName}`)
+    const next = async (): Promise<Doc | null> => {
+      const result = await client.query(`FETCH 1 FROM ${cursorName}`)
       if (result.rows.length === 0) {
-        return []
+        return null
       }
-      return result.rows.filter((it) => it != null).map((it) => parseDoc(it))
+      return result.rows[0] !== undefined ? parseDoc(result.rows[0]) : null
     }
 
     const flush = async (flush = false): Promise<void> => {
@@ -976,51 +975,47 @@ abstract class PostgresAdapterBase implements DbAdapter {
           await init('_id, data', "data ->> '%hash%' IS NOT NULL AND data ->> '%hash%' <> ''")
           initialized = true
         }
-        let docs = await ctx.with('next', { mode }, async () => await next(50))
-        if (docs.length === 0 && mode === 'hashed') {
+        let d = await ctx.with('next', { mode }, async () => await next())
+        if (d == null && mode === 'hashed') {
           await close(cursorName)
           mode = 'non_hashed'
           await init('*', "data ->> '%hash%' IS NULL OR data ->> '%hash%' = ''")
-          docs = await ctx.with('next', { mode }, async () => await next(50))
+          d = await ctx.with('next', { mode }, async () => await next())
         }
-        if (docs.length === 0) {
-          return []
+        if (d == null) {
+          return undefined
         }
-        const result: DocInfo[] = []
-        for (const d of docs) {
-          let digest: string | null = (d as any)['%hash%']
-          if ('%hash%' in d) {
-            delete d['%hash%']
+        let digest: string | null = (d as any)['%hash%']
+        if ('%hash%' in d) {
+          delete d['%hash%']
+        }
+        const pos = (digest ?? '').indexOf('|')
+        if (digest == null || digest === '') {
+          const cs = ctx.newChild('calc-size', {})
+          const size = estimateDocSize(d)
+          cs.end()
+
+          const hash = createHash('sha256')
+          updateHashForDoc(hash, d)
+          digest = hash.digest('base64')
+
+          bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
+
+          await ctx.with('flush', {}, async () => {
+            await flush()
+          })
+          return {
+            id: d._id,
+            hash: digest,
+            size
           }
-          const pos = (digest ?? '').indexOf('|')
-          if (digest == null || digest === '') {
-            const cs = ctx.newChild('calc-size', {})
-            const size = estimateDocSize(d)
-            cs.end()
-
-            const hash = createHash('sha256')
-            updateHashForDoc(hash, d)
-            digest = hash.digest('base64')
-
-            bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
-
-            await ctx.with('flush', {}, async () => {
-              await flush()
-            })
-            result.push({
-              id: d._id,
-              hash: digest,
-              size
-            })
-          } else {
-            result.push({
-              id: d._id,
-              hash: digest.slice(0, pos),
-              size: parseInt(digest.slice(pos + 1), 16)
-            })
+        } else {
+          return {
+            id: d._id,
+            hash: digest.slice(0, pos),
+            size: parseInt(digest.slice(pos + 1), 16)
           }
         }
-        return result
       },
       close: async () => {
         await ctx.with('flush', {}, async () => {
diff --git a/server/s3/src/index.ts b/server/s3/src/index.ts
index 1314ebd98e..cc2c435481 100644
--- a/server/s3/src/index.ts
+++ b/server/s3/src/index.ts
@@ -239,9 +239,9 @@ export class S3Service implements StorageAdapter {
 
     const rootPrefix = this.rootPrefix(workspaceId)
     return {
-      next: async (): Promise<ListBlobResult[]> => {
+      next: async (): Promise<ListBlobResult | undefined> => {
         try {
-          while (hasMore && buffer.length < 50) {
+          if (hasMore && buffer.length === 0) {
             const res = await this.client.listObjectsV2({
               Bucket: this.getBucketId(workspaceId),
               Prefix: rootPrefix ?? '',
@@ -271,7 +271,12 @@ export class S3Service implements StorageAdapter {
         } catch (err: any) {
           ctx.error('Failed to get list', { error: err, workspaceId: workspaceId.name })
         }
-        return buffer.splice(0, 50)
+        if (buffer.length > 0) {
+          return buffer.shift()
+        }
+        if (!hasMore) {
+          return undefined
+        }
       },
       close: async () => {}
     }
diff --git a/server/server-storage/src/blobStorage.ts b/server/server-storage/src/blobStorage.ts
index a44d147ad0..5f9202d98e 100644
--- a/server/server-storage/src/blobStorage.ts
+++ b/server/server-storage/src/blobStorage.ts
@@ -170,9 +170,9 @@ export async function createStorageDataAdapter (
     calculateHash: (d) => {
       const blob = d as Blob
       if (storageEx?.adapters !== undefined && storageEx.adapters.get(blob.provider) === undefined) {
-        return { digest: blob.etag + '_' + storageEx.defaultAdapter, size: blob.size }
+        return blob.etag + '_' + storageEx.defaultAdapter // Replace the tag so the blob can be moved to a new provider
       }
-      return { digest: blob.etag, size: blob.size }
+      return blob.etag
     }
   })
   return new StorageBlobAdapter(workspaceId, storage, ctx, blobAdapter)

From fada0c47180f8db7290c4d95cc5f134b49652dbb Mon Sep 17 00:00:00 2001
From: Alexander Onnikov <Alexander.Onnikov@xored.com>
Date: Tue, 17 Sep 2024 07:31:33 +0700
Subject: [PATCH 4/6] UBERF-8131 Reuse editor styles in rendered content
 (#6587)

---
 .../src/components/HTMLViewer.svelte             |  4 +++-
 .../src/components/MessageViewer.svelte          |  4 +++-
 packages/theme/styles/_text-editor.scss          |  3 ++-
 .../model/documents/document-comments-page.ts    | 16 ++++++++++++----
 4 files changed, 20 insertions(+), 7 deletions(-)

diff --git a/packages/presentation/src/components/HTMLViewer.svelte b/packages/presentation/src/components/HTMLViewer.svelte
index ef51054424..1cd18df40e 100644
--- a/packages/presentation/src/components/HTMLViewer.svelte
+++ b/packages/presentation/src/components/HTMLViewer.svelte
@@ -22,4 +22,6 @@
   $: node = htmlToJSON(value)
 </script>
 
-<Node {node} {preview} />
+<div class="text-markup-view">
+  <Node {node} {preview} />
+</div>
diff --git a/packages/presentation/src/components/MessageViewer.svelte b/packages/presentation/src/components/MessageViewer.svelte
index d8074f02f9..5eb062b404 100644
--- a/packages/presentation/src/components/MessageViewer.svelte
+++ b/packages/presentation/src/components/MessageViewer.svelte
@@ -28,4 +28,6 @@
   }
 </script>
 
-<Node {node} {preview} />
+<div class="text-markup-view">
+  <Node {node} {preview} />
+</div>
diff --git a/packages/theme/styles/_text-editor.scss b/packages/theme/styles/_text-editor.scss
index 50fb11ff2b..9ea99e0d45 100644
--- a/packages/theme/styles/_text-editor.scss
+++ b/packages/theme/styles/_text-editor.scss
@@ -7,7 +7,8 @@
   }
 }
 
-.text-editor-view {
+.text-editor-view,
+.text-markup-view {
   // overflow-y: auto;
   color: var(--theme-text-primary-color);
   margin: 0.25rem 0;
diff --git a/qms-tests/sanity/tests/model/documents/document-comments-page.ts b/qms-tests/sanity/tests/model/documents/document-comments-page.ts
index 2f59785378..f13c0e3a74 100644
--- a/qms-tests/sanity/tests/model/documents/document-comments-page.ts
+++ b/qms-tests/sanity/tests/model/documents/document-comments-page.ts
@@ -86,9 +86,13 @@ export class DocumentCommentsPage extends DocumentCommonPage {
     // check author
     await expect(comment.locator('div.root div.header > a span[class*="label"]').first()).toHaveText(author)
     // check message
-    await expect(comment.locator('div.activityMessage div.flex-col div.clear-mins > p').first()).toHaveText(message)
+    await expect(
+      comment.locator('div.activityMessage div.flex-col div.clear-mins div.text-markup-view > p').first()
+    ).toHaveText(message)
     // check comment
-    await expect(comment.locator('div.activityMessage div.flex-col div.clear-mins > p').last()).toHaveText(reply)
+    await expect(
+      comment.locator('div.activityMessage div.flex-col div.clear-mins div.text-markup-view > p').last()
+    ).toHaveText(reply)
   }
 
   async checkCommentInPanelById (
@@ -109,8 +113,12 @@ export class DocumentCommentsPage extends DocumentCommonPage {
     // check author
     await expect(comment.locator('div.root div.header > a span[class*="label"]').first()).toHaveText(author)
     // check message
-    await expect(comment.locator('div.activityMessage div.flex-col div.clear-mins > p').first()).toHaveText(message)
+    await expect(
+      comment.locator('div.activityMessage div.flex-col div.clear-mins div.text-markup-view > p').first()
+    ).toHaveText(message)
     // check comment
-    await expect(comment.locator('div.activityMessage div.flex-col div.clear-mins > p').last()).toHaveText(reply)
+    await expect(
+      comment.locator('div.activityMessage div.flex-col div.clear-mins div.text-markup-view > p').last()
+    ).toHaveText(reply)
   }
 }

From 17e1013213ebc5c362e82383966d6494252f81c1 Mon Sep 17 00:00:00 2001
From: Alexander Onnikov <Alexander.Onnikov@xored.com>
Date: Tue, 17 Sep 2024 07:32:53 +0700
Subject: [PATCH 5/6] fix: get rid of NOTIFY_INBOX_ONLY env variable (#6592)

---
 .vscode/launch.json                                | 3 +--
 dev/docker-compose.yaml                            | 1 -
 qms-tests/docker-compose.yaml                      | 1 -
 server-plugins/notification-resources/src/utils.ts | 2 +-
 server-plugins/notification/src/index.ts           | 3 ++-
 server/workspace-service/package.json              | 1 +
 server/workspace-service/src/index.ts              | 3 +++
 tests/docker-compose.yaml                          | 1 -
 8 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/.vscode/launch.json b/.vscode/launch.json
index 2e571562ed..3dcf647f59 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -128,10 +128,9 @@
         "MINIO_ACCESS_KEY": "minioadmin",
         "MINIO_SECRET_KEY": "minioadmin",
         "MINIO_ENDPOINT": "localhost",
-        "MODEL_VERSION": "v0.6.286",
+        "MODEL_VERSION": "v0.6.286"
         // "INIT_SCRIPT_URL": "https://raw.githubusercontent.com/hcengineering/init/main/script.yaml",
         // "INIT_WORKSPACE": "onboarding",
-        "NOTIFY_INBOX_ONLY": "true"
       },
       "runtimeVersion": "20",
       "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
diff --git a/dev/docker-compose.yaml b/dev/docker-compose.yaml
index 6c31ffd90e..d5c15cd24c 100644
--- a/dev/docker-compose.yaml
+++ b/dev/docker-compose.yaml
@@ -110,7 +110,6 @@ services:
       - MODEL_ENABLED=*
       - ACCOUNTS_URL=http://host.docker.internal:3000
       - BRANDING_PATH=/var/cfg/branding.json
-      - NOTIFY_INBOX_ONLY=true
       # - PARALLEL=2
       # - INIT_SCRIPT_URL=https://raw.githubusercontent.com/hcengineering/init/main/script.yaml
       # - INIT_WORKSPACE=onboarding
diff --git a/qms-tests/docker-compose.yaml b/qms-tests/docker-compose.yaml
index 982aea573b..f78ff67964 100644
--- a/qms-tests/docker-compose.yaml
+++ b/qms-tests/docker-compose.yaml
@@ -75,7 +75,6 @@ services:
       - MODEL_ENABLED=*
       - ACCOUNTS_URL=http://account:3003
       - BRANDING_PATH=/var/cfg/branding.json
-      - NOTIFY_INBOX_ONLY=true
     restart: unless-stopped
   front:
     image: hardcoreeng/front
diff --git a/server-plugins/notification-resources/src/utils.ts b/server-plugins/notification-resources/src/utils.ts
index f6492b9427..916876bcbe 100644
--- a/server-plugins/notification-resources/src/utils.ts
+++ b/server-plugins/notification-resources/src/utils.ts
@@ -200,7 +200,7 @@ export async function isShouldNotifyTx (
   const result = new Map<Ref<NotificationProvider>, BaseNotificationType[]>()
   let providers: NotificationProvider[] = control.modelDb.findAllSync(notification.class.NotificationProvider, {})
 
-  if (process.env.NOTIFY_INBOX_ONLY === 'true') {
+  if (getMetadata(serverNotification.metadata.InboxOnlyNotifications) === true) {
     providers = providers.filter((it) => it._id === notification.providers.InboxNotificationProvider)
   }
 
diff --git a/server-plugins/notification/src/index.ts b/server-plugins/notification/src/index.ts
index ee077398c2..c47eed36dd 100644
--- a/server-plugins/notification/src/index.ts
+++ b/server-plugins/notification/src/index.ts
@@ -151,7 +151,8 @@ export default plugin(serverNotificationId, {
   metadata: {
     SesUrl: '' as Metadata<string>,
     PushPrivateKey: '' as Metadata<string>,
-    PushSubject: '' as Metadata<string>
+    PushSubject: '' as Metadata<string>,
+    InboxOnlyNotifications: '' as Metadata<boolean>
   },
   class: {
     NotificationProviderResources: '' as Ref<Class<NotificationProviderResources>>
diff --git a/server/workspace-service/package.json b/server/workspace-service/package.json
index af284d6b52..c9ca9bfb73 100644
--- a/server/workspace-service/package.json
+++ b/server/workspace-service/package.json
@@ -54,6 +54,7 @@
     "@hcengineering/server-pipeline": "^0.6.0",
     "@hcengineering/server-client": "^0.6.0",
     "@hcengineering/server-token": "^0.6.11",
+    "@hcengineering/server-notification": "^0.6.1",
     "@hcengineering/analytics": "^0.6.0"
   }
 }
diff --git a/server/workspace-service/src/index.ts b/server/workspace-service/src/index.ts
index ebfd659791..b3e353a437 100644
--- a/server/workspace-service/src/index.ts
+++ b/server/workspace-service/src/index.ts
@@ -25,6 +25,7 @@ import {
 import { type MigrateOperation } from '@hcengineering/model'
 import { setMetadata } from '@hcengineering/platform'
 import serverClientPlugin from '@hcengineering/server-client'
+import serverNotification from '@hcengineering/server-notification'
 import serverToken from '@hcengineering/server-token'
 import toolPlugin from '@hcengineering/server-tool'
 import { WorkspaceWorker } from './service'
@@ -100,6 +101,8 @@ export function serveWorkspaceAccount (
   }
   setMetadata(serverClientPlugin.metadata.UserAgent, 'WorkspaceService')
 
+  setMetadata(serverNotification.metadata.InboxOnlyNotifications, true)
+
   const worker = new WorkspaceWorker(
     version,
     txes,
diff --git a/tests/docker-compose.yaml b/tests/docker-compose.yaml
index 76cc3f4a4c..b7dbcaf257 100644
--- a/tests/docker-compose.yaml
+++ b/tests/docker-compose.yaml
@@ -70,7 +70,6 @@ services:
       - MODEL_ENABLED=*
       - ACCOUNTS_URL=http://account:3003
       - BRANDING_PATH=/var/cfg/branding.json
-      - NOTIFY_INBOX_ONLY=true
     restart: unless-stopped
   front:
     image: hardcoreeng/front

From 66db3ac0293c9732c70c8f8d7ce6d48b33d45ceb Mon Sep 17 00:00:00 2001
From: Andrey Sobolev <haiodo@users.noreply.github.com>
Date: Tue, 17 Sep 2024 13:02:35 +0700
Subject: [PATCH 6/6] Reapply "UBERF-8122: Fix backup service" (#6595)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
---
 .vscode/launch.json                      |   7 +-
 dev/tool/package.json                    |   3 +-
 dev/tool/src/clean.ts                    |  29 +++---
 dev/tool/src/storage.ts                  | 125 ++++++++++++-----------
 packages/core/src/server.ts              |   2 +-
 packages/storage/src/index.ts            |  26 ++---
 pods/server/Dockerfile                   |   2 -
 server/backup/src/backup.ts              |  69 ++++++-------
 server/core/src/__tests__/memAdapters.ts |   5 +-
 server/core/src/adapter.ts               |   2 +-
 server/core/src/mem.ts                   |   2 +-
 server/core/src/server/aggregator.ts     |  62 +++++------
 server/core/src/storage.ts               |  14 +--
 server/minio/src/index.ts                |  14 +--
 server/mongo/src/rawAdapter.ts           |  12 ++-
 server/mongo/src/storage.ts              |  82 ++++++++-------
 server/mongo/src/utils.ts                |  21 +---
 server/postgres/src/storage.ts           |  79 +++++++-------
 server/s3/src/index.ts                   |  11 +-
 server/server-storage/src/blobStorage.ts |   4 +-
 20 files changed, 287 insertions(+), 284 deletions(-)

diff --git a/.vscode/launch.json b/.vscode/launch.json
index 3dcf647f59..5735b8797b 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -220,7 +220,7 @@
       "name": "Debug backup tool",
       "type": "node",
       "request": "launch",
-      "args": ["src/index.ts", "backup", "../../../dump/alex-staff-agency2", "alex-staff-agency"],
+      "args": ["src/index.ts", "backup", "../../../dump/platform2", "platform"],
       "env": {
         "MINIO_ACCESS_KEY": "minioadmin",
         "MINIO_SECRET_KEY": "minioadmin",
@@ -233,7 +233,10 @@
       "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
       "sourceMaps": true,
       "cwd": "${workspaceRoot}/dev/tool",
-      "protocol": "inspector"
+      "protocol": "inspector",
+      "outputCapture": "std",
+      "runtimeVersion": "20",
+      "showAsyncStacks": true,
     },
     {
       "name": "Debug tool upgrade",
diff --git a/dev/tool/package.json b/dev/tool/package.json
index 476c331442..bd62efdc73 100644
--- a/dev/tool/package.json
+++ b/dev/tool/package.json
@@ -14,12 +14,13 @@
     "_phase:bundle": "rushx bundle",
     "_phase:docker-build": "rushx docker:build",
     "_phase:docker-staging": "rushx docker:staging",
-    "bundle": "mkdir -p bundle && esbuild src/__start.ts --bundle --minify --platform=node --define:process.env.MODEL_VERSION=$(node ../../common/scripts/show_version.js) --define:process.env.GIT_REVISION=$(../../common/scripts/git_version.sh) > bundle/bundle.js",
+    "bundle": "mkdir -p bundle && esbuild src/__start.ts --bundle --keep-names --sourcemap=external --platform=node --define:process.env.MODEL_VERSION=$(node ../../common/scripts/show_version.js) --define:process.env.GIT_REVISION=$(../../common/scripts/git_version.sh) --log-level=error --outfile=bundle/bundle.js",
     "docker:build": "../../common/scripts/docker_build.sh hardcoreeng/tool",
     "docker:tbuild": "docker build -t hardcoreeng/tool . --platform=linux/amd64 && ../../common/scripts/docker_tag_push.sh hardcoreeng/tool",
     "docker:staging": "../../common/scripts/docker_tag.sh hardcoreeng/tool staging",
     "docker:push": "../../common/scripts/docker_tag.sh hardcoreeng/tool",
     "run-local": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost MONGO_URL=mongodb://localhost:27017 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --max-old-space-size=18000 ./bundle/bundle.js",
+    "run-local-brk": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost MONGO_URL=mongodb://localhost:27017 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --inspect-brk --enable-source-maps --max-old-space-size=18000 ./bundle/bundle.js",
     "run": "rush bundle --to @hcengineering/tool >/dev/null && cross-env node --max-old-space-size=8000 ./bundle/bundle.js",
     "upgrade": "rushx run-local upgrade",
     "format": "format src",
diff --git a/dev/tool/src/clean.ts b/dev/tool/src/clean.ts
index 612dcb0a92..cd199eec1e 100644
--- a/dev/tool/src/clean.ts
+++ b/dev/tool/src/clean.ts
@@ -104,12 +104,15 @@ export async function cleanWorkspace (
     const minioList = await storageAdapter.listStream(ctx, workspaceId)
     const toClean: string[] = []
     while (true) {
-      const mv = await minioList.next()
-      if (mv === undefined) {
+      const mvFiles = await minioList.next()
+      if (mvFiles.length === 0) {
         break
       }
-      if (!files.has(mv._id)) {
-        toClean.push(mv._id)
+
+      for (const mv of mvFiles) {
+        if (!files.has(mv._id)) {
+          toClean.push(mv._id)
+        }
       }
     }
     await storageAdapter.remove(ctx, workspaceId, toClean)
@@ -192,16 +195,18 @@ export async function fixMinioBW (
   const list = await storageService.listStream(ctx, workspaceId)
   let removed = 0
   while (true) {
-    const obj = await list.next()
-    if (obj === undefined) {
+    const objs = await list.next()
+    if (objs.length === 0) {
       break
     }
-    if (obj.modifiedOn < from) continue
-    if ((obj._id as string).includes('%preview%')) {
-      await storageService.remove(ctx, workspaceId, [obj._id])
-      removed++
-      if (removed % 100 === 0) {
-        console.log('removed: ', removed)
+    for (const obj of objs) {
+      if (obj.modifiedOn < from) continue
+      if ((obj._id as string).includes('%preview%')) {
+        await storageService.remove(ctx, workspaceId, [obj._id])
+        removed++
+        if (removed % 100 === 0) {
+          console.log('removed: ', removed)
+        }
       }
     }
   }
diff --git a/dev/tool/src/storage.ts b/dev/tool/src/storage.ts
index 1dc0f55d96..e5a305a910 100644
--- a/dev/tool/src/storage.ts
+++ b/dev/tool/src/storage.ts
@@ -40,20 +40,22 @@ export async function syncFiles (
       const iterator = await adapter.listStream(ctx, workspaceId)
       try {
         while (true) {
-          const data = await iterator.next()
-          if (data === undefined) break
+          const dataBulk = await iterator.next()
+          if (dataBulk.length === 0) break
 
-          const blob = await exAdapter.stat(ctx, workspaceId, data._id)
-          if (blob !== undefined) continue
+          for (const data of dataBulk) {
+            const blob = await exAdapter.stat(ctx, workspaceId, data._id)
+            if (blob !== undefined) continue
 
-          await exAdapter.syncBlobFromStorage(ctx, workspaceId, data._id, name)
+            await exAdapter.syncBlobFromStorage(ctx, workspaceId, data._id, name)
 
-          count += 1
-          if (count % 100 === 0) {
-            const duration = Date.now() - time
-            time = Date.now()
+            count += 1
+            if (count % 100 === 0) {
+              const duration = Date.now() - time
+              time = Date.now()
 
-            console.log('...processed', count, Math.round(duration / 1000) + 's')
+              console.log('...processed', count, Math.round(duration / 1000) + 's')
+            }
           }
         }
         console.log('processed', count)
@@ -112,64 +114,67 @@ async function processAdapter (
   const iterator = await source.listStream(ctx, workspaceId)
   try {
     while (true) {
-      const data = await iterator.next()
-      if (data === undefined) break
+      const dataBulk = await iterator.next()
+      if (dataBulk.length === 0) break
 
-      const blob = (await exAdapter.stat(ctx, workspaceId, data._id)) ?? (await source.stat(ctx, workspaceId, data._id))
+      for (const data of dataBulk) {
+        const blob =
+          (await exAdapter.stat(ctx, workspaceId, data._id)) ?? (await source.stat(ctx, workspaceId, data._id))
 
-      if (blob === undefined) {
-        console.error('blob not found', data._id)
-        continue
-      }
-
-      if (blob.provider !== exAdapter.defaultAdapter) {
-        if (blob.size <= params.blobSizeLimitMb * 1024 * 1024) {
-          await rateLimiter.exec(async () => {
-            try {
-              await retryOnFailure(
-                ctx,
-                5,
-                async () => {
-                  await processFile(ctx, source, params.move ? exAdapter : target, workspaceId, blob)
-                },
-                50
-              )
-              movedCnt += 1
-              movedBytes += blob.size
-              batchBytes += blob.size
-            } catch (err) {
-              console.error('failed to process blob', data._id, err)
-            }
-          })
-        } else {
-          skippedCnt += 1
-          console.log('skipping large blob', data._id, Math.round(blob.size / 1024 / 1024))
+        if (blob === undefined) {
+          console.error('blob not found', data._id)
+          continue
         }
-      }
 
-      processedCnt += 1
-      processedBytes += blob.size
+        if (blob.provider !== exAdapter.defaultAdapter) {
+          if (blob.size <= params.blobSizeLimitMb * 1024 * 1024) {
+            await rateLimiter.exec(async () => {
+              try {
+                await retryOnFailure(
+                  ctx,
+                  5,
+                  async () => {
+                    await processFile(ctx, source, params.move ? exAdapter : target, workspaceId, blob)
+                  },
+                  50
+                )
+                movedCnt += 1
+                movedBytes += blob.size
+                batchBytes += blob.size
+              } catch (err) {
+                console.error('failed to process blob', data._id, err)
+              }
+            })
+          } else {
+            skippedCnt += 1
+            console.log('skipping large blob', data._id, Math.round(blob.size / 1024 / 1024))
+          }
+        }
 
-      if (processedCnt % 100 === 0) {
-        await rateLimiter.waitProcessing()
+        processedCnt += 1
+        processedBytes += blob.size
 
-        const duration = Date.now() - time
+        if (processedCnt % 100 === 0) {
+          await rateLimiter.waitProcessing()
 
-        console.log(
-          '...processed',
-          processedCnt,
-          Math.round(processedBytes / 1024 / 1024) + 'MB',
-          'moved',
-          movedCnt,
-          Math.round(movedBytes / 1024 / 1024) + 'MB',
-          '+' + Math.round(batchBytes / 1024 / 1024) + 'MB',
-          'skipped',
-          skippedCnt,
-          Math.round(duration / 1000) + 's'
-        )
+          const duration = Date.now() - time
 
-        batchBytes = 0
-        time = Date.now()
+          console.log(
+            '...processed',
+            processedCnt,
+            Math.round(processedBytes / 1024 / 1024) + 'MB',
+            'moved',
+            movedCnt,
+            Math.round(movedBytes / 1024 / 1024) + 'MB',
+            '+' + Math.round(batchBytes / 1024 / 1024) + 'MB',
+            'skipped',
+            skippedCnt,
+            Math.round(duration / 1000) + 's'
+          )
+
+          batchBytes = 0
+          time = Date.now()
+        }
       }
     }
 
diff --git a/packages/core/src/server.ts b/packages/core/src/server.ts
index b6c86d422f..8ae4596a6e 100644
--- a/packages/core/src/server.ts
+++ b/packages/core/src/server.ts
@@ -31,7 +31,7 @@ export interface DocInfo {
  * @public
  */
 export interface StorageIterator {
-  next: (ctx: MeasureContext) => Promise<DocInfo | undefined>
+  next: (ctx: MeasureContext) => Promise<DocInfo[]>
   close: (ctx: MeasureContext) => Promise<void>
 }
 
diff --git a/packages/storage/src/index.ts b/packages/storage/src/index.ts
index 95a0e97cc0..5a1a850d42 100644
--- a/packages/storage/src/index.ts
+++ b/packages/storage/src/index.ts
@@ -24,7 +24,7 @@ export interface UploadedObjectInfo {
 }
 
 export interface BlobStorageIterator {
-  next: () => Promise<ListBlobResult | undefined>
+  next: () => Promise<ListBlobResult[]>
   close: () => Promise<void>
 }
 
@@ -99,7 +99,7 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
 
   find (ctx: MeasureContext, workspaceId: WorkspaceId): StorageIterator {
     return {
-      next: async (ctx) => undefined,
+      next: async (ctx) => [],
       close: async (ctx) => {}
     }
   }
@@ -120,8 +120,8 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
 
   async listStream (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<BlobStorageIterator> {
     return {
-      next: async (): Promise<ListBlobResult | undefined> => {
-        return undefined
+      next: async (): Promise<ListBlobResult[]> => {
+        return []
       },
       close: async () => {}
     }
@@ -179,14 +179,16 @@ export async function removeAllObjects (
   const iterator = await storage.listStream(ctx, workspaceId)
   let bulk: string[] = []
   while (true) {
-    const obj = await iterator.next()
-    if (obj === undefined) {
+    const objs = await iterator.next()
+    if (objs.length === 0) {
       break
     }
-    bulk.push(obj.storageId)
-    if (bulk.length > 50) {
-      await storage.remove(ctx, workspaceId, bulk)
-      bulk = []
+    for (const obj of objs) {
+      bulk.push(obj.storageId)
+      if (bulk.length > 50) {
+        await storage.remove(ctx, workspaceId, bulk)
+        bulk = []
+      }
     }
   }
   if (bulk.length > 0) {
@@ -206,10 +208,10 @@ export async function objectsToArray (
   const bulk: ListBlobResult[] = []
   while (true) {
     const obj = await iterator.next()
-    if (obj === undefined) {
+    if (obj.length === 0) {
       break
     }
-    bulk.push(obj)
+    bulk.push(...obj)
   }
   await iterator.close()
   return bulk
diff --git a/pods/server/Dockerfile b/pods/server/Dockerfile
index 555b5c694d..938ff0edc2 100644
--- a/pods/server/Dockerfile
+++ b/pods/server/Dockerfile
@@ -2,7 +2,6 @@ FROM node:20
 
 WORKDIR /usr/src/app
 RUN npm install --ignore-scripts=false --verbose bufferutil utf-8-validate @mongodb-js/zstd snappy msgpackr msgpackr-extract --unsafe-perm
-RUN npm install --ignore-scripts=false --verbose uNetworking/uWebSockets.js#v20.47.0
 
 RUN apt-get update
 RUN apt-get install libjemalloc2
@@ -10,7 +9,6 @@ RUN apt-get install libjemalloc2
 ENV LD_PRELOAD=libjemalloc.so.2
 ENV MALLOC_CONF=dirty_decay_ms:1000,narenas:2,background_thread:true
 
-RUN mv node_modules/uWebSockets.js/*.node .
 COPY bundle/bundle.js ./
 COPY bundle/bundle.js.map ./
 
diff --git a/server/backup/src/backup.ts b/server/backup/src/backup.ts
index d92f368002..f464372c88 100644
--- a/server/backup/src/backup.ts
+++ b/server/backup/src/backup.ts
@@ -41,7 +41,6 @@ import { BlobClient, createClient } from '@hcengineering/server-client'
 import { fullTextPushStagePrefix, type StorageAdapter } from '@hcengineering/server-core'
 import { generateToken } from '@hcengineering/server-token'
 import { connect } from '@hcengineering/server-tool'
-import { mkdtemp, writeFile } from 'node:fs/promises'
 import { PassThrough } from 'node:stream'
 import { createGzip } from 'node:zlib'
 import { join } from 'path'
@@ -488,6 +487,16 @@ async function cleanDomain (ctx: MeasureContext, connection: CoreClient & Backup
   }
 }
 
+function doTrimHash (s: string | undefined): string {
+  if (s == null) {
+    return ''
+  }
+  if (s.startsWith('"') && s.endsWith('"')) {
+    return s.slice(1, s.length - 1)
+  }
+  return s
+}
+
 /**
  * @public
  */
@@ -526,11 +535,15 @@ export async function backup (
 
   let canceled = false
   let timer: any
+  let ops = 0
 
   if (options.timeout > 0) {
-    timer = setTimeout(() => {
-      ctx.error('Timeout during backup', { workspace: workspaceId.name, timeout: options.timeout / 1000 })
-      canceled = true
+    timer = setInterval(() => {
+      if (ops === 0) {
+        ctx.error('Timeout during backup', { workspace: workspaceId.name, timeout: options.timeout / 1000 })
+        ops = 0
+        canceled = true
+      }
     }, options.timeout)
   }
 
@@ -545,8 +558,6 @@ export async function backup (
   const blobClient = new BlobClient(transactorUrl, token, workspaceId, { storageAdapter: options.storageAdapter })
   ctx.info('starting backup', { workspace: workspaceId.name })
 
-  let tmpDir: string | undefined
-
   try {
     const domains = [
       ...connection
@@ -613,6 +624,7 @@ export async function backup (
       if (size == null || Number.isNaN(size)) {
         return
       }
+      ops++
       downloaded += size
       const newDownloadedMb = Math.round(downloaded / (1024 * 1024))
       const newId = Math.round(newDownloadedMb / 10)
@@ -641,6 +653,7 @@ export async function backup (
         try {
           const currentChunk = await ctx.with('loadChunk', {}, () => connection.loadChunk(domain, idx, options.recheck))
           idx = currentChunk.idx
+          ops++
 
           let needRetrieve: Ref<Doc>[] = []
           let currentNeedRetrieveSize = 0
@@ -656,17 +669,18 @@ export async function backup (
               })
               st = Date.now()
             }
-            const kHash = digest.get(id as Ref<Doc>)
+            const _hash = doTrimHash(hash)
+            const kHash = doTrimHash(digest.get(id as Ref<Doc>))
             if (kHash !== undefined) {
               digest.delete(id as Ref<Doc>)
-              if (kHash !== hash) {
-                changes.updated.set(id as Ref<Doc>, hash)
+              if (kHash !== _hash) {
+                changes.updated.set(id as Ref<Doc>, _hash)
                 needRetrieve.push(id as Ref<Doc>)
                 currentNeedRetrieveSize += size
                 changed++
               }
             } else {
-              changes.added.set(id as Ref<Doc>, hash)
+              changes.added.set(id as Ref<Doc>, _hash)
               needRetrieve.push(id as Ref<Doc>)
               changed++
               currentNeedRetrieveSize += size
@@ -728,19 +742,13 @@ export async function backup (
       }
 
       // Cumulative digest
-      const digest = await ctx.with(
-        'load-digest',
-        {},
-        async (ctx) => await loadDigest(ctx, storage, backupInfo.snapshots, domain)
-      )
+      const digest = await ctx.with('load-digest', {}, (ctx) => loadDigest(ctx, storage, backupInfo.snapshots, domain))
 
       let _pack: Pack | undefined
       let addedDocuments = 0
 
-      let { changed, needRetrieveChunks } = await ctx.with(
-        'load-chunks',
-        { domain },
-        async (ctx) => await loadChangesFromServer(ctx, domain, digest, changes)
+      let { changed, needRetrieveChunks } = await ctx.with('load-chunks', { domain }, (ctx) =>
+        loadChangesFromServer(ctx, domain, digest, changes)
       )
 
       if (needRetrieveChunks.length > 0) {
@@ -761,6 +769,7 @@ export async function backup (
         let docs: Doc[] = []
         try {
           docs = await ctx.with('load-docs', {}, async (ctx) => await connection.loadDocs(domain, needRetrieve))
+          ops++
         } catch (err: any) {
           ctx.error('error loading docs', { domain, err, workspace: workspaceId.name })
           // Put back.
@@ -876,16 +885,12 @@ export async function backup (
 
               const finalBuffer = Buffer.concat(buffers)
               if (finalBuffer.length !== blob.size) {
-                tmpDir = tmpDir ?? (await mkdtemp('backup', {}))
-                const tmpFile = join(tmpDir, blob._id)
-                await writeFile(tmpFile, finalBuffer)
-                await writeFile(tmpFile + '.json', JSON.stringify(blob, undefined, 2))
                 ctx.error('download blob size mismatch', {
                   _id: blob._id,
                   contentType: blob.contentType,
                   size: blob.size,
-                  provider: blob.provider,
-                  tempDir: tmpDir
+                  bufferSize: finalBuffer.length,
+                  provider: blob.provider
                 })
               }
               _pack.entry({ name: d._id + '.json' }, descrJson, (err) => {
@@ -975,7 +980,7 @@ export async function backup (
     }
     ctx.end()
     if (options.timeout !== -1) {
-      clearTimeout(timer)
+      clearInterval(timer)
     }
   }
 }
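Taken together, the setInterval/clearInterval change and the ops counter form an inactivity watchdog: the backup is cancelled only when a full timeout window passes with no recorded progress. A minimal sketch of that pattern, with illustrative names; unlike the patch above, this sketch resets the counter on every tick.

function startWatchdog (timeoutMs: number, onCancel: () => void): { progress: () => void, stop: () => void } {
  let ops = 0
  const timer = setInterval(() => {
    if (ops === 0) {
      onCancel() // no progress during the whole window
    }
    ops = 0 // start counting the next window from zero
  }, timeoutMs)
  return {
    progress: () => { ops++ },           // call after each loadChunk/loadDocs/blob download
    stop: () => { clearInterval(timer) } // call when the backup finishes or fails
  }
}

// Usage sketch:
// const watchdog = startWatchdog(options.timeout, () => { canceled = true })
// ...watchdog.progress() after each completed operation...
// watchdog.stop()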
@@ -1200,22 +1205,12 @@ export async function restore (
       workspace: workspaceId.name
     })
 
-    const doTrim = (s: string | undefined): string | undefined => {
-      if (s == null) {
-        return s
-      }
-      if (s.startsWith('"') && s.endsWith('"')) {
-        return s.slice(1, s.length - 1)
-      }
-      return s
-    }
-
     // Let's find difference
     const docsToAdd = new Map(
       Array.from(changeset.entries()).filter(
         ([it]) =>
           !serverChangeset.has(it) ||
-          (serverChangeset.has(it) && doTrim(serverChangeset.get(it)) !== doTrim(changeset.get(it)))
+          (serverChangeset.has(it) && doTrimHash(serverChangeset.get(it)) !== doTrimHash(changeset.get(it)))
       )
     )
     const docsToRemove = Array.from(serverChangeset.keys()).filter((it) => !changeset.has(it))
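A self-contained sketch of the restore-side difference computation, assuming two Map<string, string> changesets keyed by document id; hashes are normalized with the same quote-trimming helper before comparison. Names other than doTrimHash are illustrative.

function doTrimHash (s: string | undefined): string {
  if (s == null) return ''
  return s.startsWith('"') && s.endsWith('"') ? s.slice(1, -1) : s
}

function diffChangesets (
  changeset: Map<string, string>,      // hashes recorded in the backup snapshots
  serverChangeset: Map<string, string> // hashes currently reported by the server
): { toAdd: string[], toRemove: string[] } {
  const toAdd = Array.from(changeset.keys()).filter(
    (id) => !serverChangeset.has(id) || doTrimHash(serverChangeset.get(id)) !== doTrimHash(changeset.get(id))
  )
  const toRemove = Array.from(serverChangeset.keys()).filter((id) => !changeset.has(id))
  return { toAdd, toRemove }
}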
diff --git a/server/core/src/__tests__/memAdapters.ts b/server/core/src/__tests__/memAdapters.ts
index a3109c6a59..02060086e2 100644
--- a/server/core/src/__tests__/memAdapters.ts
+++ b/server/core/src/__tests__/memAdapters.ts
@@ -56,7 +56,7 @@ export class MemStorageAdapter implements StorageAdapter {
     const files = Array.from(this.files.values()).filter((it) => it.workspace === workspaceId.name)
     return {
       next: async () => {
-        return files.shift()
+        return files.splice(0, 100)
       },
       close: async () => {}
     }
@@ -189,8 +189,7 @@ export class MemRawDBAdapter implements RawDBAdapter {
     }
     return {
       next: async () => {
-        const doc = result.shift()
-        return doc
+        return result.splice(0, 50)
       },
       close: async () => {}
     }
diff --git a/server/core/src/adapter.ts b/server/core/src/adapter.ts
index 2c14a685dd..5d25a954b6 100644
--- a/server/core/src/adapter.ts
+++ b/server/core/src/adapter.ts
@@ -57,7 +57,7 @@ export interface DomainHelper {
 }
 
 export interface RawDBAdapterStream<T extends Doc> {
-  next: () => Promise<T | undefined>
+  next: () => Promise<T[]>
   close: () => Promise<void>
 }
 
diff --git a/server/core/src/mem.ts b/server/core/src/mem.ts
index 60fa7c117a..8b615044d6 100644
--- a/server/core/src/mem.ts
+++ b/server/core/src/mem.ts
@@ -86,7 +86,7 @@ export class DummyDbAdapter implements DbAdapter {
 
   find (ctx: MeasureContext, domain: Domain): StorageIterator {
     return {
-      next: async () => undefined,
+      next: async () => [],
       close: async () => {}
     }
   }
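The iterator contract now yields arrays, and an empty array marks the end of the stream. A minimal consumer sketch under that assumption (the interface is re-declared locally; everything else is illustrative):

interface BatchedStream<T> {
  next: () => Promise<T[]>
  close: () => Promise<void>
}

// Drain a batched stream; an empty batch signals that there is no more data.
async function drain<T> (stream: BatchedStream<T>, handle: (batch: T[]) => Promise<void>): Promise<void> {
  try {
    while (true) {
      const batch = await stream.next()
      if (batch.length === 0) break
      await handle(batch)
    }
  } finally {
    await stream.close()
  }
}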
diff --git a/server/core/src/server/aggregator.ts b/server/core/src/server/aggregator.ts
index d000c7a2b0..e176aeb6b0 100644
--- a/server/core/src/server/aggregator.ts
+++ b/server/core/src/server/aggregator.ts
@@ -99,31 +99,20 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
   find (ctx: MeasureContext, workspaceId: WorkspaceId): StorageIterator {
     const storageIterator = this.makeStorageIterator(ctx, workspaceId)
 
-    let buffer: ListBlobResult[] = []
-
     return {
-      next: async (ctx) => {
-        const docInfo = await storageIterator.next()
-        if (docInfo !== undefined) {
-          buffer.push(docInfo)
+      next: async () => {
+        const docInfos = await storageIterator.next()
+        if (docInfos.length > 0) {
+          await this.doSyncDocs(ctx, workspaceId, docInfos)
         }
-        if (buffer.length > 50) {
-          await this.doSyncDocs(ctx, workspaceId, buffer)
 
-          buffer = []
-        }
-        if (docInfo !== undefined) {
-          return {
-            hash: docInfo.etag,
-            id: docInfo._id,
-            size: docInfo.size
-          }
-        }
+        return docInfos.map((it) => ({
+          hash: it.etag,
+          id: it._id,
+          size: it.size
+        }))
       },
       close: async (ctx) => {
-        if (buffer.length > 0) {
-          await this.doSyncDocs(ctx, workspaceId, buffer)
-        }
         await storageIterator.close()
       }
     }
@@ -134,22 +123,21 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
     let iterator: BlobStorageIterator | undefined
     return {
       next: async () => {
-        while (true) {
-          if (iterator === undefined && adapters.length > 0) {
-            iterator = await (adapters.shift() as StorageAdapter).listStream(ctx, workspaceId)
-          }
-          if (iterator === undefined) {
-            return undefined
-          }
-          const docInfo = await iterator.next()
-          if (docInfo !== undefined) {
-            // We need to check if our stored version is fine
-            return docInfo
-          } else {
-            // We need to take next adapter
-            await iterator.close()
-            iterator = undefined
-          }
+        if (iterator === undefined && adapters.length > 0) {
+          iterator = await (adapters.shift() as StorageAdapter).listStream(ctx, workspaceId)
+        }
+        if (iterator === undefined) {
+          return []
+        }
+        const docInfos = await iterator.next()
+        if (docInfos.length > 0) {
+          // We need to check if our stored version is fine
+          return docInfos
+        } else {
+          // We need to take next adapter
+          await iterator.close()
+          iterator = undefined
+          return []
         }
       },
       close: async () => {
@@ -227,7 +215,7 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
   async listStream (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<BlobStorageIterator> {
     const data = await this.dbAdapter.findStream<Blob>(ctx, workspaceId, DOMAIN_BLOB, {})
     return {
-      next: async (): Promise<ListBlobResult | undefined> => {
+      next: async (): Promise<ListBlobResult[]> => {
         return await data.next()
       },
       close: async () => {
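A compact sketch of chaining several batched iterators into one, as the aggregator does per storage adapter; this version keeps pulling until it can return a non-empty batch or every source is exhausted. Names are illustrative.

interface BatchedIterator<T> {
  next: () => Promise<T[]>
  close: () => Promise<void>
}

function chainIterators<T> (sources: Array<() => Promise<BatchedIterator<T>>>): BatchedIterator<T> {
  let current: BatchedIterator<T> | undefined
  return {
    next: async () => {
      while (true) {
        if (current === undefined) {
          const open = sources.shift()
          if (open === undefined) return [] // nothing left anywhere
          current = await open()
        }
        const batch = await current.next()
        if (batch.length > 0) return batch
        await current.close() // this source is done, move to the next one
        current = undefined
      }
    },
    close: async () => {
      if (current !== undefined) await current.close()
    }
  }
}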
diff --git a/server/core/src/storage.ts b/server/core/src/storage.ts
index e252bd4bd9..8d436aa733 100644
--- a/server/core/src/storage.ts
+++ b/server/core/src/storage.ts
@@ -9,6 +9,7 @@ import {
   type StorageIterator,
   type WorkspaceId
 } from '@hcengineering/core'
+import { estimateDocSize } from './utils'
 
 export * from '@hcengineering/storage'
 
@@ -19,7 +20,7 @@ export function getBucketId (workspaceId: WorkspaceId): string {
   return toWorkspaceString(workspaceId)
 }
 
-const chunkSize = 2 * 1024 * 1024
+const chunkSize = 512 * 1024
 
 /**
  * @public
@@ -70,14 +71,15 @@ export class BackupClientOps {
       const docs: DocInfo[] = []
 
       while (size < chunkSize) {
-        const doc = await chunk.iterator.next(ctx)
-        if (doc === undefined) {
+        const _docs = await chunk.iterator.next(ctx)
+        if (_docs.length === 0) {
           chunk.finished = true
           break
         }
-
-        size += doc.size
-        docs.push(doc)
+        for (const doc of _docs) {
+          size += estimateDocSize(doc)
+          docs.push(doc)
+        }
       }
 
       return {
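A sketch of the chunk-filling loop in isolation: batches are pulled from a batched iterator and accumulated until an approximate byte budget is spent. estimateDocSize is replaced here by a hypothetical JSON-length stand-in; the 512 KiB budget mirrors the new chunkSize.

interface DocLike { id: string, hash: string, size: number }

const chunkSize = 512 * 1024 // target bytes per chunk

// Hypothetical stand-in for estimateDocSize.
const estimateSize = (d: DocLike): number => JSON.stringify(d).length

async function fillChunk (next: () => Promise<DocLike[]>): Promise<{ docs: DocLike[], finished: boolean }> {
  const docs: DocLike[] = []
  let size = 0
  while (size < chunkSize) {
    const batch = await next()
    if (batch.length === 0) {
      return { docs, finished: true } // iterator drained
    }
    for (const doc of batch) {
      size += estimateSize(doc)
      docs.push(doc)
    }
  }
  return { docs, finished: false }
}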
diff --git a/server/minio/src/index.ts b/server/minio/src/index.ts
index f0987da53f..1a56fba05c 100644
--- a/server/minio/src/index.ts
+++ b/server/minio/src/index.ts
@@ -192,7 +192,7 @@ export class MinioService implements StorageAdapter {
 
     const rootPrefix = this.rootPrefix(workspaceId)
     return {
-      next: async (): Promise<ListBlobResult | undefined> => {
+      next: async (): Promise<ListBlobResult[]> => {
         try {
           if (stream === undefined && !done) {
             const rprefix = rootPrefix ?? ''
@@ -227,7 +227,7 @@ export class MinioService implements StorageAdapter {
                 })
               }
               onNext()
-              if (buffer.length > 5) {
+              if (buffer.length > 100) {
                 stream?.pause()
               }
             })
@@ -236,24 +236,24 @@ export class MinioService implements StorageAdapter {
           const msg = (err?.message as string) ?? ''
           if (msg.includes('Invalid bucket name') || msg.includes('The specified bucket does not exist')) {
             hasMore = false
-            return
+            return []
           }
           error = err
         }
 
         if (buffer.length > 0) {
-          return buffer.shift()
+          return buffer.splice(0, 50)
         }
         if (!hasMore) {
-          return undefined
+          return []
         }
-        return await new Promise<ListBlobResult | undefined>((resolve, reject) => {
+        return await new Promise<ListBlobResult[]>((resolve, reject) => {
           onNext = () => {
             if (error != null) {
               reject(error)
             }
             onNext = () => {}
-            resolve(buffer.shift())
+            resolve(buffer.splice(0, 50))
           }
           stream?.resume()
         })
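The MinIO listing above bridges a push-based object stream into the pull-based batched iterator. A simplified sketch of that bridge, using a Node.js object-mode Readable as the push source; the thresholds (100 buffered items, batches of 50) follow the values in the patch, the rest is illustrative.

import { Readable } from 'node:stream'

function batchedReader<T> (stream: Readable): { next: () => Promise<T[]>, close: () => Promise<void> } {
  const buffer: T[] = []
  let done = false
  let error: Error | undefined
  let onNext: () => void = () => {}

  stream.on('data', (item: T) => {
    buffer.push(item)
    onNext()
    if (buffer.length > 100) stream.pause() // backpressure: stop filling a large buffer
  })
  stream.on('end', () => { done = true; onNext() })
  stream.on('error', (err) => { error = err; onNext() })

  return {
    next: async () => {
      if (error != null) throw error
      if (buffer.length > 0) return buffer.splice(0, 50)
      if (done) return []
      stream.resume() // make sure the source keeps producing while we wait
      await new Promise<void>((resolve) => { onNext = () => { onNext = () => {}; resolve() } })
      if (error != null) throw error
      return buffer.splice(0, 50)
    },
    close: async () => { stream.destroy() }
  }
}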
diff --git a/server/mongo/src/rawAdapter.ts b/server/mongo/src/rawAdapter.ts
index e1b0a1135d..f677e4eda5 100644
--- a/server/mongo/src/rawAdapter.ts
+++ b/server/mongo/src/rawAdapter.ts
@@ -105,7 +105,17 @@ export function createRawMongoDBAdapter (url: string): RawDBAdapter {
       const { cursor } = await getCursor(workspace, domain, query, options)
 
       return {
-        next: async () => (await cursor.next()) ?? undefined,
+        next: async () => {
+          const result: T[] = []
+          const doc = await cursor.next()
+          if (doc != null) {
+            result.push(doc)
+          }
+          if (cursor.bufferedCount() > 0) {
+            result.push(...cursor.readBufferedDocuments())
+          }
+          return result
+        },
         close: async () => {
           await cursor.close()
         }
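A sketch of the cursor pattern used above, assuming the official mongodb Node driver: one await of cursor.next() pulls the next server batch into the driver's internal buffer, and readBufferedDocuments() drains whatever else is already buffered without extra round trips. Connection details in the usage example are illustrative.

import { MongoClient, type Document, type FindCursor } from 'mongodb'

// Read a cursor in driver-sized batches instead of one document per call.
async function nextBatch<T extends Document> (cursor: FindCursor<T>): Promise<T[]> {
  const result: T[] = []
  const doc = await cursor.next() // triggers a getMore only if the buffer is empty
  if (doc != null) {
    result.push(doc)
  }
  if (cursor.bufferedCount() > 0) {
    result.push(...cursor.readBufferedDocuments()) // no additional round trip
  }
  return result
}

async function example (): Promise<void> {
  const client = await MongoClient.connect('mongodb://localhost:27017')
  const cursor = client.db('ws').collection('tx').find({})
  let batch = await nextBatch(cursor)
  while (batch.length > 0) {
    // process batch ...
    batch = await nextBatch(cursor)
  }
  await cursor.close()
  await client.close()
}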
diff --git a/server/mongo/src/storage.ts b/server/mongo/src/storage.ts
index 6e5a2d733c..48b982d49b 100644
--- a/server/mongo/src/storage.ts
+++ b/server/mongo/src/storage.ts
@@ -16,7 +16,6 @@
 import core, {
   DOMAIN_MODEL,
   DOMAIN_TX,
-  type Iterator,
   SortingOrder,
   TxProcessor,
   addOperation,
@@ -30,6 +29,7 @@ import core, {
   type AttachedDoc,
   type Class,
   type Doc,
+  type DocInfo,
   type DocumentQuery,
   type DocumentUpdate,
   type Domain,
@@ -38,6 +38,7 @@ import core, {
   type FindOptions,
   type FindResult,
   type Hierarchy,
+  type Iterator,
   type Lookup,
   type MeasureContext,
   type Mixin,
@@ -135,7 +136,7 @@ export async function toArray<T> (cursor: AbstractCursor<T>): Promise<T[]> {
 }
 
 export interface DbAdapterOptions {
-  calculateHash?: (doc: Doc) => string
+  calculateHash?: (doc: Doc) => { digest: string, size: number }
 }
 
 abstract class MongoAdapterBase implements DbAdapter {
@@ -1034,44 +1035,17 @@ abstract class MongoAdapterBase implements DbAdapter {
           iterator = coll.find({ '%hash%': { $in: ['', null] } })
           d = await ctx.with('next', { mode }, async () => await iterator.next())
         }
-        if (d == null) {
-          return undefined
+        const result: DocInfo[] = []
+        if (d != null) {
+          result.push(this.toDocInfo(d, bulkUpdate))
         }
-        let digest: string | null = (d as any)['%hash%']
-        if ('%hash%' in d) {
-          delete d['%hash%']
-        }
-        const pos = (digest ?? '').indexOf('|')
-        if (digest == null || digest === '') {
-          const cs = ctx.newChild('calc-size', {})
-          const size = estimateDocSize(d)
-          cs.end()
-
-          if (this.options?.calculateHash !== undefined) {
-            digest = this.options.calculateHash(d)
-          } else {
-            const hash = createHash('sha256')
-            updateHashForDoc(hash, d)
-            digest = hash.digest('base64')
-          }
-
-          bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
-
-          await ctx.with('flush', {}, async () => {
-            await flush()
-          })
-          return {
-            id: d._id,
-            hash: digest,
-            size
-          }
-        } else {
-          return {
-            id: d._id,
-            hash: digest.slice(0, pos),
-            size: parseInt(digest.slice(pos + 1), 16)
-          }
+        if (iterator.bufferedCount() > 0) {
+          result.push(...iterator.readBufferedDocuments().map((it) => this.toDocInfo(it, bulkUpdate)))
         }
+        await ctx.with('flush', {}, async () => {
+          await flush()
+        })
+        return result
       },
       close: async () => {
         await ctx.with('flush', {}, async () => {
@@ -1085,6 +1059,38 @@ abstract class MongoAdapterBase implements DbAdapter {
     }
   }
 
+  private toDocInfo (d: Doc, bulkUpdate: Map<Ref<Doc>, string>): DocInfo {
+    let digest: string | null = (d as any)['%hash%']
+    if ('%hash%' in d) {
+      delete d['%hash%']
+    }
+    const pos = (digest ?? '').indexOf('|')
+    if (digest == null || digest === '') {
+      let size = estimateDocSize(d)
+
+      if (this.options?.calculateHash !== undefined) {
+        ;({ digest, size } = this.options.calculateHash(d))
+      } else {
+        const hash = createHash('sha256')
+        updateHashForDoc(hash, d)
+        digest = hash.digest('base64')
+      }
+
+      bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
+      return {
+        id: d._id,
+        hash: digest,
+        size
+      }
+    } else {
+      return {
+        id: d._id,
+        hash: digest.slice(0, pos),
+        size: parseInt(digest.slice(pos + 1), 16)
+      }
+    }
+  }
+
   async load (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
     return await ctx.with('load', { domain }, async () => {
       if (docs.length === 0) {
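A small sketch of the `%hash%` bookkeeping used above: the sha256 digest and the estimated size are packed into a single stored string as `<digest>|<size in hex>`, so a later pass can recover both without recomputing them. Helper names and the JSON-length size stand-in are illustrative.

import { createHash } from 'node:crypto'

// Pack a document's digest and estimated size into one stored value.
function packHash (doc: object): string {
  const json = JSON.stringify(doc)
  const digest = createHash('sha256').update(json).digest('base64')
  const size = json.length // stand-in for estimateDocSize
  return `${digest}|${size.toString(16)}`
}

// Recover both parts from a previously stored value.
function unpackHash (value: string): { hash: string, size: number } {
  const pos = value.indexOf('|')
  return { hash: value.slice(0, pos), size: parseInt(value.slice(pos + 1), 16) }
}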
diff --git a/server/mongo/src/utils.ts b/server/mongo/src/utils.ts
index 8f555a3ff8..616e8dab30 100644
--- a/server/mongo/src/utils.ts
+++ b/server/mongo/src/utils.ts
@@ -23,7 +23,7 @@ import {
 } from '@hcengineering/core'
 import { PlatformError, unknownStatus } from '@hcengineering/platform'
 import { type DomainHelperOperations } from '@hcengineering/server-core'
-import { MongoClient, type Collection, type Db, type Document, type MongoClientOptions } from 'mongodb'
+import { MongoClient, type Collection, type Db, type Document } from 'mongodb'
 
 const connections = new Map<string, MongoClientReferenceImpl>()
 
@@ -121,31 +121,20 @@ export class ClientRef implements MongoClientReference {
  * Initialize a workspace connection to DB
  * @public
  */
-export function getMongoClient (uri: string, options?: MongoClientOptions): MongoClientReference {
+export function getMongoClient (uri: string): MongoClientReference {
   const extraOptions = JSON.parse(process.env.MONGO_OPTIONS ?? '{}')
-  const key = `${uri}${process.env.MONGO_OPTIONS ?? '{}'}_${JSON.stringify(options ?? {})}`
+  const key = `${uri}${process.env.MONGO_OPTIONS ?? '{}'}`
   let existing = connections.get(key)
 
-  const allOptions: MongoClientOptions = {
-    ...options,
-    ...extraOptions
-  }
-
-  // Make poll size stable
-  if (allOptions.maxPoolSize !== undefined) {
-    allOptions.minPoolSize = allOptions.maxPoolSize
-  }
-  allOptions.monitorCommands = false
-  allOptions.noDelay = true
-
   // If not created or closed
   if (existing === undefined) {
     existing = new MongoClientReferenceImpl(
       MongoClient.connect(uri, {
+        retryReads: true,
         appName: 'transactor',
         enableUtf8Validation: false,
 
-        ...allOptions
+        ...extraOptions
       }),
       () => {
         connections.delete(key)
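With the explicit options parameter removed, extra driver options can still be injected through the MONGO_OPTIONS environment variable as a JSON object. A sketch of that path, assuming the variable contains valid MongoClientOptions fields:

import { MongoClient } from 'mongodb'

// Merge JSON options from the environment into the connection call.
async function connectWithEnvOptions (uri: string): Promise<MongoClient> {
  const extraOptions = JSON.parse(process.env.MONGO_OPTIONS ?? '{}')
  return await MongoClient.connect(uri, {
    retryReads: true,
    appName: 'transactor',
    enableUtf8Validation: false,
    ...extraOptions
  })
}

// Example: MONGO_OPTIONS='{"maxPoolSize":10}' node service.js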
diff --git a/server/postgres/src/storage.ts b/server/postgres/src/storage.ts
index 5cb5eeceb1..7f50259e79 100644
--- a/server/postgres/src/storage.ts
+++ b/server/postgres/src/storage.ts
@@ -18,6 +18,7 @@ import core, {
   type AttachedDoc,
   type Class,
   type Doc,
+  type DocInfo,
   type DocumentQuery,
   type DocumentUpdate,
   type Domain,
@@ -941,12 +942,12 @@ abstract class PostgresAdapterBase implements DbAdapter {
       )
     }
 
-    const next = async (): Promise<Doc | null> => {
-      const result = await client.query(`FETCH 1 FROM ${cursorName}`)
+    const next = async (limit: number): Promise<Doc[]> => {
+      const result = await client.query(`FETCH ${limit} FROM ${cursorName}`)
       if (result.rows.length === 0) {
-        return null
+        return []
       }
-      return result.rows[0] !== undefined ? parseDoc(result.rows[0]) : null
+      return result.rows.filter((it) => it != null).map((it) => parseDoc(it))
     }
 
     const flush = async (flush = false): Promise<void> => {
@@ -975,47 +976,51 @@ abstract class PostgresAdapterBase implements DbAdapter {
           await init('_id, data', "data ->> '%hash%' IS NOT NULL AND data ->> '%hash%' <> ''")
           initialized = true
         }
-        let d = await ctx.with('next', { mode }, async () => await next())
-        if (d == null && mode === 'hashed') {
+        let docs = await ctx.with('next', { mode }, async () => await next(50))
+        if (docs.length === 0 && mode === 'hashed') {
           await close(cursorName)
           mode = 'non_hashed'
           await init('*', "data ->> '%hash%' IS NULL OR data ->> '%hash%' = ''")
-          d = await ctx.with('next', { mode }, async () => await next())
+          docs = await ctx.with('next', { mode }, async () => await next(50))
         }
-        if (d == null) {
-          return undefined
+        if (docs.length === 0) {
+          return []
         }
-        let digest: string | null = (d as any)['%hash%']
-        if ('%hash%' in d) {
-          delete d['%hash%']
-        }
-        const pos = (digest ?? '').indexOf('|')
-        if (digest == null || digest === '') {
-          const cs = ctx.newChild('calc-size', {})
-          const size = estimateDocSize(d)
-          cs.end()
-
-          const hash = createHash('sha256')
-          updateHashForDoc(hash, d)
-          digest = hash.digest('base64')
-
-          bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
-
-          await ctx.with('flush', {}, async () => {
-            await flush()
-          })
-          return {
-            id: d._id,
-            hash: digest,
-            size
+        const result: DocInfo[] = []
+        for (const d of docs) {
+          let digest: string | null = (d as any)['%hash%']
+          if ('%hash%' in d) {
+            delete d['%hash%']
           }
-        } else {
-          return {
-            id: d._id,
-            hash: digest.slice(0, pos),
-            size: parseInt(digest.slice(pos + 1), 16)
+          const pos = (digest ?? '').indexOf('|')
+          if (digest == null || digest === '') {
+            const cs = ctx.newChild('calc-size', {})
+            const size = estimateDocSize(d)
+            cs.end()
+
+            const hash = createHash('sha256')
+            updateHashForDoc(hash, d)
+            digest = hash.digest('base64')
+
+            bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
+
+            await ctx.with('flush', {}, async () => {
+              await flush()
+            })
+            result.push({
+              id: d._id,
+              hash: digest,
+              size
+            })
+          } else {
+            result.push({
+              id: d._id,
+              hash: digest.slice(0, pos),
+              size: parseInt(digest.slice(pos + 1), 16)
+            })
           }
         }
+        return result
       },
       close: async () => {
         await ctx.with('flush', {}, async () => {
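A sketch of the server-side cursor paging used above, assuming the pg client: FETCH <n> returns up to n rows per round trip and an empty set once the cursor is drained. Table and cursor names are illustrative, and non-holdable cursors must live inside a transaction.

import { Client } from 'pg'

async function scanTable (client: Client, table: string, handle: (row: any) => void): Promise<void> {
  const cursorName = 'backup_scan'
  await client.query('BEGIN')
  try {
    // In real code the table name must come from a trusted, validated list.
    await client.query(`DECLARE ${cursorName} CURSOR FOR SELECT _id, data FROM ${table}`)
    while (true) {
      const res = await client.query(`FETCH 50 FROM ${cursorName}`)
      if (res.rows.length === 0) break // cursor drained
      res.rows.forEach(handle)
    }
    await client.query(`CLOSE ${cursorName}`)
    await client.query('COMMIT')
  } catch (err) {
    await client.query('ROLLBACK')
    throw err
  }
}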
diff --git a/server/s3/src/index.ts b/server/s3/src/index.ts
index cc2c435481..1314ebd98e 100644
--- a/server/s3/src/index.ts
+++ b/server/s3/src/index.ts
@@ -239,9 +239,9 @@ export class S3Service implements StorageAdapter {
 
     const rootPrefix = this.rootPrefix(workspaceId)
     return {
-      next: async (): Promise<ListBlobResult | undefined> => {
+      next: async (): Promise<ListBlobResult[]> => {
         try {
-          if (hasMore && buffer.length === 0) {
+          while (hasMore && buffer.length < 50) {
             const res = await this.client.listObjectsV2({
               Bucket: this.getBucketId(workspaceId),
               Prefix: rootPrefix ?? '',
@@ -271,12 +271,7 @@ export class S3Service implements StorageAdapter {
         } catch (err: any) {
           ctx.error('Failed to get list', { error: err, workspaceId: workspaceId.name })
         }
-        if (buffer.length > 0) {
-          return buffer.shift()
-        }
-        if (!hasMore) {
-          return undefined
-        }
+        return buffer.splice(0, 50)
       },
       close: async () => {}
     }
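A sketch of the S3 listing loop above in isolation, assuming the aggregated S3 client from @aws-sdk/client-s3: pages are requested with listObjectsV2 until at least 50 keys are buffered (or the listing is exhausted), then handed out in batches. Bucket and prefix are illustrative parameters.

import { S3 } from '@aws-sdk/client-s3'

async function * listKeyBatches (client: S3, bucket: string, prefix: string): AsyncGenerator<string[]> {
  const buffer: string[] = []
  let continuationToken: string | undefined
  let hasMore = true
  while (hasMore || buffer.length > 0) {
    // Top the buffer up to ~50 entries before handing out a batch.
    while (hasMore && buffer.length < 50) {
      const res = await client.listObjectsV2({
        Bucket: bucket,
        Prefix: prefix,
        ContinuationToken: continuationToken
      })
      buffer.push(...(res.Contents ?? []).map((it) => it.Key ?? ''))
      continuationToken = res.NextContinuationToken
      hasMore = res.IsTruncated === true
    }
    if (buffer.length === 0) break
    yield buffer.splice(0, 50)
  }
}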
diff --git a/server/server-storage/src/blobStorage.ts b/server/server-storage/src/blobStorage.ts
index 5f9202d98e..a44d147ad0 100644
--- a/server/server-storage/src/blobStorage.ts
+++ b/server/server-storage/src/blobStorage.ts
@@ -170,9 +170,9 @@ export async function createStorageDataAdapter (
     calculateHash: (d) => {
       const blob = d as Blob
       if (storageEx?.adapters !== undefined && storageEx.adapters.get(blob.provider) === undefined) {
-        return blob.etag + '_' + storageEx.defaultAdapter // Replace tag to be able to move to new provider
+        return { digest: blob.etag + '_' + storageEx.defaultAdapter, size: blob.size }
       }
-      return blob.etag
+      return { digest: blob.etag, size: blob.size }
     }
   })
   return new StorageBlobAdapter(workspaceId, storage, ctx, blobAdapter)
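Finally, a small sketch of the new calculateHash contract for blobs, assuming a record with etag, size and provider fields: the hook now reports the digest together with the size, and blobs whose provider is no longer configured get the default adapter appended so they can be moved to it. Helper names are illustrative.

interface BlobLike { etag: string, size: number, provider: string }

function calculateBlobHash (
  blob: BlobLike,
  configuredProviders: Set<string>,
  defaultAdapter: string
): { digest: string, size: number } {
  if (!configuredProviders.has(blob.provider)) {
    // Replace the tag so the blob will be picked up by the default provider.
    return { digest: blob.etag + '_' + defaultAdapter, size: blob.size }
  }
  return { digest: blob.etag, size: blob.size }
}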