Merge remote-tracking branch 'origin/develop' into staging

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
Andrey Sobolev 2024-09-25 21:24:14 +07:00
commit 30bf0cb16c
No known key found for this signature in database
GPG Key ID: BD80F68D68D8F7F2
19 changed files with 289 additions and 111 deletions

13
.vscode/launch.json vendored
View File

@ -8,19 +8,12 @@
"name": "Debug notion import",
"type": "node",
"request": "launch",
// "args": ["src/__start.ts", "import-notion-to-teamspace", "/home/anna/work/notion/natalya/Export-fad9ecb4-a1a5-4623-920d-df32dd423743", "-ws", "w-user1-ws1-66d8018b-ce1e0c3164-006bb0", "-ts", "notion"],
"args": ["src/__start.ts", "import-notion", "/home/anna/work/notion/natalya/Export-fad9ecb4-a1a5-4623-920d-df32dd423743", "-ws", "w-user1-ws1-66d8018b-ce1e0c3164-006bb0"],
"args": ["src/__start.ts", "import-notion-to-teamspace", "/home/anna/work/notion/natalya/Export-fad9ecb4-a1a5-4623-920d-df32dd423743", "-ws", "notion-test", "-u", "user1", "-pw", "1234", "-ts", "natalya"],
// "args": ["src/__start.ts", "import-notion-with-teamspaces", "/home/anna/work/notion/natalya/Export-fad9ecb4-a1a5-4623-920d-df32dd423743", "-ws", "ws1", "-u", "user1", "-pw", "1234"],
"env": {
"SERVER_SECRET": "secret",
"MINIO_ACCESS_KEY": "minioadmin",
"MINIO_SECRET_KEY": "minioadmin",
"MINIO_ENDPOINT": "localhost",
"TRANSACTOR_URL": "ws://localhost:3333",
"MONGO_URL": "mongodb://localhost:27017",
"FRONT_URL": "http://localhost:8087",
"ACCOUNTS_URL": "http://localhost:3000",
"TELEGRAM_DATABASE": "telegram-service",
"ELASTIC_URL": "http://localhost:9200",
"REKONI_URL": "http://localhost:4004"
},
"runtimeVersion": "20",
"runtimeArgs": ["--nolazy", "-r", "ts-node/register"],

View File

@ -50,28 +50,33 @@ rushx run-local import-notion-to-teamspace /home/john/extracted-notion-docs \
--teamspace imported
```
#### For clouds:
#### For cloud deployment:
##### Here is an example for those who are using the huly.app cloud:
To import Notion workspace with teamspaces
1. Go to the root folder of the extracted archive with exported data.
2. Run import command as follow:
* To import Notion workspace with teamspaces
```
docker run \
-e SERVER_SECRET="" \
-e ACCOUNTS_URL="https://account.huly.app" \
-e FRONT_URL="https://huly.app" \
-v $(pwd):/data \
hardcoreeng/tool:latest \
-- bundle.js import-notion-with-teamspaces /tmp/notion-docs \
-- bundle.js import-notion-with-teamspaces /data \
--user jane.doe@gmail.com \
--password 4321qwe \
--workspace ws1
```
To import Notion workspace without teamspaces or a page with subpages.
* To import Notion workspace without teamspaces or a page with subpages.
```
docker run \
-e SERVER_SECRET="" \
-e ACCOUNTS_URL="https://account.huly.app" \
-e FRONT_URL="https://huly.app" \
-v $(pwd):/data \
hardcoreeng/tool:latest \
-- bundle.js import-notion-to-teamspace /tmp/notion-docs \
-- bundle.js import-notion-to-teamspace /data \
--user jane.doe@gmail.com \
--password 4321qwe \
--workspace ws1 \

View File

@ -55,7 +55,7 @@ import serverClientPlugin, {
} from '@hcengineering/server-client'
import { getServerPipeline } from '@hcengineering/server-pipeline'
import serverToken, { decodeToken, generateToken } from '@hcengineering/server-token'
import toolPlugin, { connect, FileModelLogger } from '@hcengineering/server-tool'
import toolPlugin, { FileModelLogger } from '@hcengineering/server-tool'
import { createWorkspace, upgradeWorkspace } from '@hcengineering/workspace-service'
import path from 'path'
@ -75,7 +75,6 @@ import core, {
systemAccountEmail,
TxOperations,
versionToString,
type Client as CoreClient,
type Data,
type Doc,
type Ref,
@ -297,17 +296,13 @@ export function devTool (
}
}
const connection = (await connect(
selectedWs.endpoint,
{
name: selectedWs.workspaceId
},
undefined,
{
mode: 'backup'
}
)) as unknown as CoreClient
const client = new TxOperations(connection, core.account.System)
const connection = await createClient(selectedWs.endpoint, selectedWs.token)
const acc = connection.getModel().getAccountByEmail(user)
if (acc === undefined) {
console.log('Account not found for email: ', user)
return
}
const client = new TxOperations(connection, acc._id)
await importNotion(client, uploader(selectedWs.token), dir, teamspace)
await connection.close()
}

View File

@ -201,18 +201,12 @@ export function createModel (builder: Builder): void {
{ createdBy: 1 },
{ createdBy: -1 },
{ createdOn: -1 },
{ modifiedBy: 1 },
{ objectSpace: 1 }
{ modifiedBy: 1 }
],
indexes: [
{
keys: {
objectSpace: 1,
_id: 1,
modifiedOn: 1
},
filter: {
objectSpace: core.space.Model
objectSpace: 1
}
}
]

View File

@ -659,6 +659,16 @@ export interface DomainIndexConfiguration extends Doc {
export type WorkspaceMode = 'pending-creation' | 'creating' | 'upgrading' | 'pending-deletion' | 'deleting' | 'active'
export interface BackupStatus {
dataSize: number
blobsSize: number
backupSize: number
lastBackup: Timestamp
backups: number
}
export interface BaseWorkspaceInfo {
workspace: string // A unique workspace name, also used as the database name
disabled?: boolean
@ -676,4 +686,6 @@ export interface BaseWorkspaceInfo {
progress?: number // Some progress
endpoint: string
backupInfo?: BackupStatus
}

View File

@ -13,7 +13,7 @@
// limitations under the License.
//
import { AccountRole, Doc, Ref, Timestamp, WorkspaceMode } from '@hcengineering/core'
import { AccountRole, Doc, Ref, Timestamp, WorkspaceMode, type BackupStatus } from '@hcengineering/core'
import type { Asset, IntlString, Metadata, Plugin, Resource, Status } from '@hcengineering/platform'
import { plugin } from '@hcengineering/platform'
import type { AnyComponent } from '@hcengineering/ui'
@ -35,6 +35,8 @@ export interface Workspace {
progress?: number
lastVisit: number
backupInfo?: BackupStatus
}
/**

View File

@ -37,9 +37,23 @@
import { workspacesStore } from '../utils'
// import Drag from './icons/Drag.svelte'
function getLastVisitDays (it: Workspace): number {
  // Whole days elapsed since this workspace was last visited.
  const dayMs = 1000 * 3600 * 24
  const elapsed = Date.now() - it.lastVisit
  return Math.floor(elapsed / dayMs)
}
onMount(() => {
  void getResource(login.function.GetWorkspaces).then(async (f) => {
    const workspaces = await f()
    // Primary key: days since last visit (bdays - adays); ties broken by
    // larger backup size first.
    workspaces.sort((a, b) => {
      const adays = getLastVisitDays(a)
      // BUG FIX: was getLastVisitDays(a) — the comparator compared a
      // workspace with itself, so the last-visit ordering never applied.
      const bdays = getLastVisitDays(b)
      if (adays === bdays) {
        return (b.backupInfo?.backupSize ?? 0) - (a.backupInfo?.backupSize ?? 0)
      }
      return bdays - adays
    })
    $workspacesStore = workspaces
  })
})
@ -181,6 +195,9 @@
{wsName}
{#if isAdmin && ws.lastVisit != null && ws.lastVisit !== 0}
<div class="text-sm">
{#if ws.backupInfo != null}
{ws.backupInfo.backupSize}Mb -
{/if}
({lastUsageDays} days)
</div>
{/if}

View File

@ -44,6 +44,7 @@ import core, {
Version,
versionToString,
WorkspaceId,
type BackupStatus,
type Branding,
type WorkspaceMode
} from '@hcengineering/core'
@ -1356,6 +1357,40 @@ export async function updateWorkspaceInfo (
)
}
/**
 * @public
 *
 * Stores the latest backup statistics on the workspace record identified by
 * the token, and refreshes its lastProcessingTime.
 *
 * Only the backup service may call this: the decoded token must carry
 * extra.service === 'backup', otherwise Forbidden is thrown.
 *
 * @param ctx - measure/logging context used for token decoding
 * @param db - accounts database handle
 * @param branding - unused in this method; kept for the common account-method signature
 * @param token - service token whose workspace name selects the target workspace
 * @param backupInfo - backup status snapshot to persist
 * @throws PlatformError (Forbidden) when the token is not a backup-service token
 * @throws PlatformError (WorkspaceNotFound) when no workspace matches the token
 */
export async function updateBackupInfo (
  ctx: MeasureContext,
  db: Db,
  branding: Branding | null,
  token: string,
  backupInfo: BackupStatus
): Promise<void> {
  const decodedToken = decodeToken(ctx, token)
  if (decodedToken.extra?.service !== 'backup') {
    throw new PlatformError(new Status(Severity.ERROR, platform.status.Forbidden, {}))
  }
  const workspaceInfo = await getWorkspaceById(db, decodedToken.workspace.name)
  if (workspaceInfo === null) {
    throw new PlatformError(
      new Status(Severity.ERROR, platform.status.WorkspaceNotFound, { workspace: decodedToken.workspace.name })
    )
  }
  const wsCollection = db.collection<Omit<Workspace, '_id'>>(WORKSPACE_COLLECTION)
  // lastProcessingTime is updated together with the stats so schedulers see
  // this workspace as recently processed.
  await wsCollection.updateOne(
    { _id: workspaceInfo._id },
    {
      $set: {
        backupInfo,
        lastProcessingTime: Date.now()
      }
    }
  )
}
async function postCreateUserWorkspace (
ctx: MeasureContext,
db: Db,
@ -2747,6 +2782,7 @@ export function getMethods (): Record<string, AccountMethod> {
// Workspace service methods
getPendingWorkspace: wrap(getPendingWorkspace),
updateWorkspaceInfo: wrap(updateWorkspaceInfo),
updateBackupInfo: wrap(updateBackupInfo),
workerHandshake: wrap(workerHandshake)
}
}

View File

@ -49,6 +49,7 @@ import { Writable } from 'stream'
import { extract, Pack, pack } from 'tar-stream'
import { createGunzip, gunzipSync, gzipSync } from 'zlib'
import { BackupStorage } from './storage'
import type { BackupStatus } from '@hcengineering/core/src/classes'
export * from './storage'
const dataBlobSize = 50 * 1024 * 1024
@ -498,6 +499,10 @@ function doTrimHash (s: string | undefined): string {
return s
}
// Outcome of a single backup() run: the size counters from BackupStatus
// (accumulated in bytes here; 'backups' and 'lastBackup' are maintained by
// the caller) plus a flag telling whether a backup was actually produced.
export interface BackupResult extends Omit<BackupStatus, 'backups' | 'lastBackup'> {
  // true when a backup completed; false when it was skipped (no transaction
  // changes) or failed with an error
  result: boolean
}
/**
* @public
*/
@ -531,7 +536,13 @@ export async function backup (
skipBlobContentTypes: [],
blobDownloadLimit: 15
}
): Promise<boolean> {
): Promise<BackupResult> {
const result: BackupResult = {
result: false,
dataSize: 0,
blobsSize: 0,
backupSize: 0
}
ctx = ctx.newChild('backup', {
workspaceId: workspaceId.name,
force: options.force,
@ -589,7 +600,8 @@ export async function backup (
if (lastTx._id === backupInfo.lastTxId && !options.force) {
printEnd = false
ctx.info('No transaction changes. Skipping backup.', { workspace: workspaceId.name })
return false
result.result = false
return result
}
}
lastTxChecked = true
@ -613,14 +625,19 @@ export async function backup (
)) as CoreClient & BackupClient)
if (!lastTxChecked) {
lastTx = await connection.findOne(core.class.Tx, {}, { limit: 1, sort: { modifiedOn: SortingOrder.Descending } })
lastTx = await connection.findOne(
core.class.Tx,
{ objectSpace: { $ne: core.space.Model } },
{ limit: 1, sort: { modifiedOn: SortingOrder.Descending } }
)
if (lastTx !== undefined) {
if (lastTx._id === backupInfo.lastTxId && !options.force) {
ctx.info('No transaction changes. Skipping backup.', { workspace: workspaceId.name })
if (options.getConnection === undefined) {
await connection.close()
}
return false
result.result = false
return result
}
}
}
@ -700,6 +717,11 @@ export async function backup (
let currentNeedRetrieveSize = 0
for (const { id, hash, size } of currentChunk.docs) {
if (domain === DOMAIN_BLOB) {
result.blobsSize += size
} else {
result.dataSize += size
}
processed++
if (Date.now() - st > 2500) {
ctx.info('processed', {
@ -1034,10 +1056,33 @@ export async function backup (
backupInfo.lastTxId = lastTx?._id ?? '0' // We could store last tx, since full backup is complete
await storage.writeFile(infoFile, gzipSync(JSON.stringify(backupInfo, undefined, 2), { level: defaultLevel }))
}
return true
result.result = true
const addFileSize = async (file: string | undefined | null): Promise<void> => {
if (file != null && (await storage.exists(file))) {
const fileSize = await storage.stat(file)
result.backupSize += fileSize
}
}
// Let's calculate data size for backup
for (const sn of backupInfo.snapshots) {
for (const [, d] of Object.entries(sn.domains)) {
await addFileSize(d.snapshot)
for (const snp of d.snapshots ?? []) {
await addFileSize(snp)
}
for (const snp of d.storage ?? []) {
await addFileSize(snp)
}
}
}
await addFileSize(infoFile)
return result
} catch (err: any) {
ctx.error('backup error', { err, workspace: workspaceId.name })
return false
return result
} finally {
if (printEnd) {
ctx.info('end backup', { workspace: workspaceId.name, totalTime: Date.now() - st })

View File

@ -13,7 +13,7 @@
// limitations under the License.
//
import {
import core, {
BaseWorkspaceInfo,
DOMAIN_TX,
getWorkspaceId,
@ -22,6 +22,7 @@ import {
SortingOrder,
systemAccountEmail,
type BackupClient,
type BackupStatus,
type Branding,
type Client,
type MeasureContext,
@ -29,7 +30,7 @@ import {
type WorkspaceIdWithUrl
} from '@hcengineering/core'
import { PlatformError, unknownError } from '@hcengineering/platform'
import { listAccountWorkspaces } from '@hcengineering/server-client'
import { listAccountWorkspaces, updateBackupInfo } from '@hcengineering/server-client'
import {
BackupClientOps,
SessionDataImpl,
@ -38,6 +39,7 @@ import {
type PipelineFactory,
type StorageAdapter
} from '@hcengineering/server-core'
import { generateToken } from '@hcengineering/server-token'
import { backup } from '.'
import { createStorageBackupStorage } from './storage'
export interface BackupConfig {
@ -114,6 +116,23 @@ class BackupWorker {
): Promise<{ failedWorkspaces: BaseWorkspaceInfo[], processed: number, skipped: number }> {
const workspacesIgnore = new Set(this.config.SkipWorkspaces.split(';'))
const workspaces = (await listAccountWorkspaces(this.config.Token)).filter((it) => {
const lastBackup = it.backupInfo?.lastBackup ?? 0
if ((Date.now() - lastBackup) / 1000 < this.config.Interval) {
// No backup required, interval not elapsed
ctx.info('Skip backup', { workspace: it.workspace, lastBackup: Math.round((Date.now() - lastBackup) / 1000) })
return false
}
const lastVisitSec = Math.floor((Date.now() - it.lastVisit) / 1000)
if (lastVisitSec > this.config.Interval) {
// Skip backup: workspace has not been visited within the backup interval
ctx.info('Skip backup, since not visited since last check', {
workspace: it.workspace,
days: Math.floor(lastVisitSec / 3600 / 24),
seconds: lastVisitSec
})
return false
}
return !workspacesIgnore.has(it.workspace)
})
workspaces.sort((a, b) => b.lastVisit - a.lastVisit)
@ -133,6 +152,7 @@ class BackupWorker {
return { failedWorkspaces, processed, skipped: workspaces.length - processed }
}
index++
const st = Date.now()
rootCtx.warn('\n\nBACKUP WORKSPACE ', {
workspace: ws.workspace,
index,
@ -156,56 +176,73 @@ class BackupWorker {
workspaceName: ws.workspaceName ?? '',
workspaceUrl: ws.workspaceUrl ?? ''
}
processed += (await ctx.with(
'backup',
{ workspace: ws.workspace },
async (ctx) =>
await backup(ctx, '', getWorkspaceId(ws.workspace), storage, {
skipDomains: [],
force: false,
recheck: false,
timeout: this.config.Timeout * 1000,
connectTimeout: 5 * 60 * 1000, // 5 minutes to,
blobDownloadLimit: 100,
skipBlobContentTypes: [],
storageAdapter: this.workspaceStorageAdapter,
getLastTx: async (): Promise<Tx | undefined> => {
const config = this.getConfig(ctx, wsUrl, null, this.workspaceStorageAdapter)
const adapterConf = config.adapters[config.domains[DOMAIN_TX]]
const hierarchy = new Hierarchy()
const modelDb = new ModelDb(hierarchy)
const txAdapter = await adapterConf.factory(
ctx,
hierarchy,
adapterConf.url,
wsUrl,
modelDb,
this.workspaceStorageAdapter
)
try {
await txAdapter.init?.()
const result = await ctx.with('backup', { workspace: ws.workspace }, (ctx) =>
backup(ctx, '', getWorkspaceId(ws.workspace), storage, {
skipDomains: [],
force: true,
recheck: false,
timeout: this.config.Timeout * 1000,
connectTimeout: 5 * 60 * 1000, // 5 minutes to connect
blobDownloadLimit: 100,
skipBlobContentTypes: [],
storageAdapter: this.workspaceStorageAdapter,
getLastTx: async (): Promise<Tx | undefined> => {
const config = this.getConfig(ctx, wsUrl, null, this.workspaceStorageAdapter)
const adapterConf = config.adapters[config.domains[DOMAIN_TX]]
const hierarchy = new Hierarchy()
const modelDb = new ModelDb(hierarchy)
const txAdapter = await adapterConf.factory(
ctx,
hierarchy,
adapterConf.url,
wsUrl,
modelDb,
this.workspaceStorageAdapter
)
try {
await txAdapter.init?.()
return (
await txAdapter.rawFindAll<Tx>(
DOMAIN_TX,
{},
{ limit: 1, sort: { modifiedOn: SortingOrder.Descending } }
)
).shift()
} finally {
await txAdapter.close()
}
},
getConnection: async () => {
if (pipeline === undefined) {
pipeline = await this.pipelineFactory(ctx, wsUrl, true, () => {}, null)
}
return this.wrapPipeline(ctx, pipeline, wsUrl)
return (
await txAdapter.rawFindAll<Tx>(
DOMAIN_TX,
{ objectSpace: { $ne: core.space.Model } },
{ limit: 1, sort: { modifiedOn: SortingOrder.Descending } }
)
).shift()
} finally {
await txAdapter.close()
}
})
))
? 1
: 0
},
getConnection: async () => {
if (pipeline === undefined) {
pipeline = await this.pipelineFactory(ctx, wsUrl, true, () => {}, null)
}
return this.wrapPipeline(ctx, pipeline, wsUrl)
}
})
)
if (result.result) {
const backupInfo: BackupStatus = {
backups: (ws.backupInfo?.backups ?? 0) + 1,
lastBackup: Date.now(),
backupSize: Math.round((result.backupSize * 100) / (1024 * 1024)) / 100,
dataSize: Math.round((result.dataSize * 100) / (1024 * 1024)) / 100,
blobsSize: Math.round((result.blobsSize * 100) / (1024 * 1024)) / 100
}
rootCtx.warn('\n\nBACKUP STATS ', {
workspace: ws.workspace,
index,
...backupInfo,
time: Math.round((Date.now() - st) / 1000),
total: workspaces.length
})
// We need to report update for stats to account service
processed += 1
const token = generateToken(systemAccountEmail, { name: ws.workspace }, { service: 'backup' })
await updateBackupInfo(token, backupInfo)
}
} catch (err: any) {
rootCtx.error('\n\nFAILED to BACKUP', { workspace: ws.workspace, err })
failedWorkspaces.push(ws)

View File

@ -1,6 +1,6 @@
import { MeasureContext, WorkspaceId } from '@hcengineering/core'
import { StorageAdapter } from '@hcengineering/server-core'
import { createReadStream, createWriteStream, existsSync } from 'fs'
import { createReadStream, createWriteStream, existsSync, statSync } from 'fs'
import { mkdir, readFile, rm, writeFile } from 'fs/promises'
import { dirname, join } from 'path'
import { PassThrough, Readable, Writable } from 'stream'
@ -14,6 +14,8 @@ export interface BackupStorage {
write: (name: string) => Promise<Writable>
writeFile: (name: string, data: string | Buffer) => Promise<void>
exists: (name: string) => Promise<boolean>
stat: (name: string) => Promise<number>
delete: (name: string) => Promise<void>
}
@ -41,6 +43,10 @@ class FileStorage implements BackupStorage {
return existsSync(join(this.root, name))
}
async stat (name: string): Promise<number> {
  // Size in bytes of the backup file addressed relative to the backup root.
  const target = join(this.root, name)
  return statSync(target).size
}
async delete (name: string): Promise<void> {
await rm(join(this.root, name))
}
@ -87,6 +93,15 @@ class AdapterStorage implements BackupStorage {
}
}
async stat (name: string): Promise<number> {
  // Best effort: a missing or unreadable object is reported as size 0
  // instead of propagating the adapter error.
  try {
    const info = await this.client.stat(this.ctx, this.workspaceId, join(this.root, name))
    return info?.size ?? 0
  } catch (err: any) {
    return 0
  }
}
async delete (name: string): Promise<void> {
  // Remove a single backup object, addressed relative to the backup root.
  const fullName = join(this.root, name)
  await this.client.remove(this.ctx, this.workspaceId, [fullName])
}

View File

@ -13,7 +13,7 @@
// limitations under the License.
//
import { type BaseWorkspaceInfo, type Data, type Version } from '@hcengineering/core'
import { type BaseWorkspaceInfo, type Data, type Version, BackupStatus } from '@hcengineering/core'
import { getMetadata, PlatformError, unknownError } from '@hcengineering/platform'
import plugin from './plugin'
@ -47,6 +47,24 @@ export async function listAccountWorkspaces (token: string): Promise<BaseWorkspa
return (workspaces.result as BaseWorkspaceInfo[]) ?? []
}
/**
 * Reports backup statistics for the workspace encoded in the token to the
 * accounts service via its 'updateBackupInfo' RPC method.
 *
 * NOTE(review): the account-side 'updateBackupInfo' handler resolves to void,
 * so 'workspaces.result' is expected to be undefined and this function will
 * always return [] — confirm whether the BaseWorkspaceInfo[] return type is
 * intentional or a copy-paste from listAccountWorkspaces.
 *
 * @param token - backup-service token identifying the target workspace
 * @param info - backup status snapshot to store
 */
export async function updateBackupInfo (token: string, info: BackupStatus): Promise<BaseWorkspaceInfo[]> {
  const accountsUrl = getAccoutsUrlOrFail()
  const workspaces = await (
    await fetch(accountsUrl, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      body: JSON.stringify({
        method: 'updateBackupInfo',
        params: [token, info]
      })
    })
  ).json()
  return (workspaces.result as BaseWorkspaceInfo[]) ?? []
}
export async function getTransactorEndpoint (
token: string,
kind: 'internal' | 'external' = 'internal',
@ -149,7 +167,10 @@ export async function workerHandshake (
})
}
export async function getWorkspaceInfo (token: string): Promise<BaseWorkspaceInfo | undefined> {
export async function getWorkspaceInfo (
token: string,
updateLastAccess = false
): Promise<BaseWorkspaceInfo | undefined> {
const accountsUrl = getAccoutsUrlOrFail()
const workspaceInfo = await (
await fetch(accountsUrl, {
@ -160,7 +181,7 @@ export async function getWorkspaceInfo (token: string): Promise<BaseWorkspaceInf
},
body: JSON.stringify({
method: 'getWorkspaceInfo',
params: []
params: updateLastAccess ? [true] : []
})
})
).json()

View File

@ -27,7 +27,7 @@ export async function getWorkspaceInfo (token: string): Promise<ClientWorkspaceI
},
body: JSON.stringify({
method: 'getWorkspaceInfo',
params: []
params: [true] // Collaborator should update last used time.
})
})
).json()

View File

@ -21,6 +21,7 @@ import {
type UploadedObjectInfo
} from '@hcengineering/storage'
import { Analytics } from '@hcengineering/analytics'
import { type RawDBAdapter } from '../adapter'
import serverCore from '../plugin'
import { type StorageConfig, type StorageConfiguration } from '../types'
@ -166,9 +167,19 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
@withContext('aggregator-make', {})
async make (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
for (const a of this.adapters.values()) {
if (!(await a.exists(ctx, workspaceId))) {
await a.make(ctx, workspaceId)
for (const [k, a] of this.adapters.entries()) {
try {
if (!(await a.exists(ctx, workspaceId))) {
await a.make(ctx, workspaceId)
}
} catch (err: any) {
ctx.error('failed to init adapter', { adapter: k, workspaceId, error: err })
// Do not throw error in case default adapter is ok
Analytics.handleError(err)
if (k === this.defaultAdapter) {
// We should throw in case default one is not valid
throw err
}
}
}
}

View File

@ -1664,9 +1664,8 @@ class MongoTxAdapter extends MongoAdapterBase implements TxAdapter {
@withContext('get-model')
async getModel (ctx: MeasureContext): Promise<Tx[]> {
const txCollection = this.db.collection<Tx>(DOMAIN_TX)
const exists = await txCollection.indexExists('objectSpace_fi_1__id_fi_1_modifiedOn_fi_1')
const cursor = await ctx.with('find', {}, async () => {
let c = txCollection.find(
const c = txCollection.find(
{ objectSpace: core.space.Model },
{
sort: {
@ -1675,9 +1674,6 @@ class MongoTxAdapter extends MongoAdapterBase implements TxAdapter {
}
}
)
if (exists) {
c = c.hint({ objectSpace: 1, _id: 1, modifiedOn: 1 })
}
return c
})
const model = await ctx.with('to-array', {}, async () => await toArray<Tx>(cursor))

View File

@ -365,7 +365,7 @@ export async function upgradeModel (
await tryMigrate(migrateClient, coreId, [
{
state: 'indexes-v4',
state: 'indexes-v5',
func: upgradeIndexes
},
{

View File

@ -311,7 +311,6 @@ export function startHttpServer (
: false,
skipUTF8Validation: true,
maxPayload: 250 * 1024 * 1024,
backlog: 1000,
clientTracking: false // We do not need to track clients inside clients.
})
// eslint-disable-next-line @typescript-eslint/no-misused-promises

View File

@ -4,7 +4,7 @@ import config from './config'
/**
* @public
*/
export async function getWorkspaceInfo (token: string): Promise<ClientWorkspaceInfo> {
export async function getWorkspaceInfo (token: string, updateLastModified = false): Promise<ClientWorkspaceInfo> {
const accountsUrl = config.AccountsURL
const workspaceInfo = await (
await fetch(accountsUrl, {
@ -15,7 +15,7 @@ export async function getWorkspaceInfo (token: string): Promise<ClientWorkspaceI
},
body: JSON.stringify({
method: 'getWorkspaceInfo',
params: []
params: updateLastModified ? [true] : []
})
})
).json()

View File

@ -692,7 +692,7 @@ export class PlatformWorker {
)
let workspaceInfo: ClientWorkspaceInfo | undefined
try {
workspaceInfo = await getWorkspaceInfo(token)
workspaceInfo = await getWorkspaceInfo(token, true)
} catch (err: any) {
this.ctx.error('Workspace not found:', { workspace })
errors++