Mirror of https://github.com/hcengineering/platform.git (synced 2025-05-12 02:11:57 +00:00)
UBERF-6150: Improve backup logic (#5041)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>

Parent: 3d2b60ac99
Commit: 18e8dcab57
@@ -400,13 +400,15 @@ export function devTool (
     .command('backup <dirName> <workspace>')
     .description('dump workspace transactions and minio resources')
     .option('-s, --skip <skip>', 'A list of ; separated domain names to skip during backup', '')
-    .action(async (dirName: string, workspace: string, cmd: { skip: string }) => {
+    .option('-f, --force', 'Force backup', false)
+    .action(async (dirName: string, workspace: string, cmd: { skip: string, force: boolean }) => {
       const storage = await createFileBackupStorage(dirName)
       await backup(
         transactorUrl,
         getWorkspaceId(workspace, productId),
         storage,
-        (cmd.skip ?? '').split(';').map((it) => it.trim())
+        (cmd.skip ?? '').split(';').map((it) => it.trim()),
+        cmd.force
       )
     })

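The dev tool builds its CLI with chained .command()/.option()/.action() calls in the commander style. A minimal sketch of how the new -f, --force flag reaches the handler, assuming the commander package (the handler body here is illustrative only):

import { program } from 'commander'

program
  .command('backup <dirName> <workspace>')
  .description('dump workspace transactions and minio resources')
  .option('-s, --skip <skip>', 'A list of ; separated domain names to skip during backup', '')
  .option('-f, --force', 'Force backup', false)
  .action(async (dirName: string, workspace: string, cmd: { skip: string, force: boolean }) => {
    // cmd.force is true only when -f/--force was passed; it defaults to false.
    const skipDomains = (cmd.skip ?? '').split(';').map((it) => it.trim())
    console.log(dirName, workspace, skipDomains, cmd.force)
  })

program.parse(process.argv)

An invocation ending in -f thus forces a backup run even when no new transactions exist (see the force check further down).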
@@ -614,9 +614,9 @@ export async function listWorkspaces (db: Db, productId: string): Promise<Worksp
  * @public
  */
 export async function listWorkspacesRaw (db: Db, productId: string): Promise<Workspace[]> {
-  return (await db.collection<Workspace>(WORKSPACE_COLLECTION).find(withProductId(productId, {})).toArray()).map(
-    (it) => ({ ...it, productId })
-  )
+  return (await db.collection<Workspace>(WORKSPACE_COLLECTION).find(withProductId(productId, {})).toArray())
+    .map((it) => ({ ...it, productId }))
+    .filter((it) => it.disabled !== true)
 }

 /**
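The rewritten listWorkspacesRaw now also drops disabled workspaces. Note that `it.disabled !== true` keeps entries where the flag is false or missing entirely; a minimal sketch with a simplified workspace shape:

// `disabled !== true` keeps workspaces where `disabled` is false or unset.
interface WorkspaceLike { workspace: string, disabled?: boolean }

const all: WorkspaceLike[] = [
  { workspace: 'a' },                  // kept: disabled undefined
  { workspace: 'b', disabled: false }, // kept
  { workspace: 'c', disabled: true }   // dropped
]

const active = all.filter((it) => it.disabled !== true)
console.log(active.map((it) => it.workspace)) // [ 'a', 'b' ]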
@@ -967,7 +967,7 @@ export async function getWorkspaceInfo (
   db: Db,
   productId: string,
   token: string,
-  updateLatsVisit: boolean = false
+  _updateLastVisit: boolean = false
 ): Promise<ClientWorkspaceInfo> {
   const { email, workspace, extra } = decodeToken(token)
   const guest = extra?.guest === 'true'
@@ -1002,7 +1002,7 @@ export async function getWorkspaceInfo (
   if (ws == null) {
     throw new PlatformError(new Status(Severity.ERROR, platform.status.Forbidden, {}))
   }
-  if (updateLatsVisit && isAccount(account)) {
+  if (_updateLastVisit && isAccount(account)) {
     await updateLastVisit(db, ws, account)
   }
   return mapToClientWorkspace(ws)
@@ -327,7 +327,8 @@ export async function backup (
   transactorUrl: string,
   workspaceId: WorkspaceId,
   storage: BackupStorage,
-  skipDomains: string[] = []
+  skipDomains: string[] = [],
+  force: boolean = false
 ): Promise<void> {
   const connection = (await connect(transactorUrl, workspaceId, undefined, {
     mode: 'backup'
@@ -368,7 +369,7 @@ export async function backup (
       { limit: 1, sort: { modifiedOn: SortingOrder.Descending } }
     )
     if (lastTx !== undefined) {
-      if (lastTx._id === backupInfo.lastTxId) {
+      if (lastTx._id === backupInfo.lastTxId && !force) {
        console.log('No transaction changes. Skipping backup.')
        return
      } else {
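A minimal sketch of the new skip rule; the names lastTx, backupInfo, and force come from the hunk above, while the standalone function and simplified types are illustrative:

function shouldSkipBackup (lastTxId: string, lastBackedUpTxId: string, force: boolean): boolean {
  // Without -f/--force, an unchanged last transaction id means "nothing new";
  // with force, the backup proceeds even when nothing has changed.
  return lastTxId === lastBackedUpTxId && !force
}

console.log(shouldSkipBackup('tx1', 'tx1', false)) // true  -> skip
console.log(shouldSkipBackup('tx1', 'tx1', true))  // false -> back up anyway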
@@ -787,12 +788,14 @@ export async function restore (
             const d = blobs.get(bname)
             blobs.delete(bname)
             ;(doc as BlobData).base64Data = d?.buffer?.toString('base64') ?? ''
+            ;(doc as any)['%hash%'] = changeset.get(doc._id)
             void sendChunk(doc, bf.length).finally(() => {
               requiredDocs.delete(doc._id)
               next()
             })
           }
         } else {
+          ;(doc as any)['%hash%'] = changeset.get(doc._id)
           void sendChunk(doc, bf.length).finally(() => {
             requiredDocs.delete(doc._id)
             next()
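Restore now re-attaches each document's recorded digest before sending it, so the receiving side keeps its '%hash%' bookkeeping instead of treating every restored document as changed. A minimal sketch; the Map-based changeset shape is an assumption for illustration:

// Hypothetical shapes for illustration only.
interface DocLike { _id: string, [key: string]: any }

const changeset = new Map<string, string>([['doc-1', 'h4sh|2a']])

function attachHash (doc: DocLike): DocLike {
  // Mirrors `(doc as any)['%hash%'] = changeset.get(doc._id)` above.
  doc['%hash%'] = changeset.get(doc._id)
  return doc
}

console.log(attachHash({ _id: 'doc-1', title: 'hello' })['%hash%']) // 'h4sh|2a'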
@@ -295,6 +295,9 @@ export class FullTextIndexPipeline implements FullTextPipeline {

   async initializeStages (): Promise<void> {
     for (const st of this.stages) {
+      if (this.cancelling) {
+        return
+      }
       await st.initialize(this.metrics, this.storage, this)
     }
   }
@@ -312,6 +315,9 @@ export class FullTextIndexPipeline implements FullTextPipeline {
     // We need to be sure we have individual indexes per stage.
     const oldStagesRegex = [/fld-v.*/, /cnt-v.*/, /fts-v.*/, /sum-v.*/]
     for (const st of this.stages) {
+      if (this.cancelling) {
+        return
+      }
       const regexp = oldStagesRegex.find((r) => r.test(st.stageId))
       if (regexp !== undefined) {
         await this.storage.removeOldIndex(DOMAIN_DOC_INDEX_STATE, regexp, new RegExp(st.stageId))
@@ -404,6 +410,9 @@ export class FullTextIndexPipeline implements FullTextPipeline {
     let idx = 0
     const _classUpdate = new Set<Ref<Class<Doc>>>()
     for (const st of this.stages) {
+      if (this.cancelling) {
+        return []
+      }
       idx++
       await rateLimiter.exec(async () => {
         while (true) {
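All three guards added above follow the same cooperative-cancellation pattern: re-check a cancelling flag between awaits so a shutdown can interrupt stage processing mid-loop. A minimal self-contained sketch; the class and member names are illustrative, not the pipeline's real API:

class PipelineSketch {
  private cancelling = false

  async run (stages: Array<() => Promise<void>>): Promise<void> {
    for (const stage of stages) {
      if (this.cancelling) {
        return // stop between stages instead of finishing the whole loop
      }
      await stage() // each await gives close() a chance to take effect
    }
  }

  close (): void {
    this.cancelling = true
  }
}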
@@ -20,6 +20,7 @@ import core, {
   TxProcessor,
   cutObjectArray,
   escapeLikeForRegexp,
+  generateId,
   getTypeOf,
   isOperator,
   toFindResult,
@@ -70,7 +71,6 @@ import {
   type Sort,
   type UpdateFilter
 } from 'mongodb'
-import { createHash } from 'node:crypto'
 import { getMongoClient, getWorkspaceDB } from './utils'

 function translateDoc (doc: Doc): Document {
@@ -699,6 +699,8 @@ abstract class MongoAdapterBase implements DbAdapter {
     const coll = this.db.collection<Doc>(domain)
     const iterator = coll.find({}, {})

+    const hashID = generateId() // We just need a different value
+
     const bulkUpdate = new Map<Ref<Doc>, string>()
     const flush = async (flush = false): Promise<void> => {
       if (bulkUpdate.size > 1000 || flush) {
@@ -728,11 +730,8 @@ abstract class MongoAdapterBase implements DbAdapter {
       }
       const pos = (digest ?? '').indexOf('|')
       if (digest == null || digest === '' || pos === -1) {
-        const doc = JSON.stringify(d)
-        const hash = createHash('sha256')
-        hash.update(doc)
-        const size = doc.length
-        digest = hash.digest('base64')
+        const size = this.calcSize(d)
+        digest = hashID // we just need some random value

         bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)

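Instead of computing a sha256 over every document's JSON, the adapter now stamps documents with a cheap per-run id plus an estimated size, encoded as digest|size-in-hex. A sketch of that marker format; the encodeHash/decodeHash helper names are hypothetical:

function encodeHash (digest: string, size: number): string {
  return `${digest}|${size.toString(16)}`
}

function decodeHash (value: string): { digest: string, size: number } | undefined {
  const pos = value.indexOf('|')
  if (pos === -1) return undefined // old or absent marker -> needs regeneration
  return { digest: value.slice(0, pos), size: parseInt(value.slice(pos + 1), 16) }
}

console.log(encodeHash('h4sh1d', 1234)) // 'h4sh1d|4d2'
console.log(decodeHash('h4sh1d|4d2'))   // { digest: 'h4sh1d', size: 1234 }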
@@ -757,6 +756,9 @@ abstract class MongoAdapterBase implements DbAdapter {
       }
     }

+  /**
+   * Return some estimation for object size
+   */
   calcSize (obj: any): number {
     if (typeof obj === 'undefined') {
       return 0
@@ -769,17 +771,41 @@ abstract class MongoAdapterBase implements DbAdapter {
       // include prototype properties
       const value = obj[key]
       const type = getTypeOf(value)
-      if (type === 'Array') {
-        result += this.calcSize(value)
-      } else if (type === 'Object') {
-        result += this.calcSize(value)
-      } else if (type === 'Date') {
-        result += new Date(value.getTime()).toString().length
-      }
-      if (type === 'string') {
-        result += (value as string).length
-      } else {
-        result += JSON.stringify(value).length
-      }
+      result += key.length
+
+      switch (type) {
+        case 'Array':
+          result += 4 + this.calcSize(value)
+          break
+        case 'Object':
+          result += this.calcSize(value)
+          break
+        case 'Date':
+          result += 24 // Some value
+          break
+        case 'string':
+          result += (value as string).length
+          break
+        case 'number':
+          result += 8
+          break
+        case 'boolean':
+          result += 1
+          break
+        case 'symbol':
+          result += (value as symbol).toString().length
+          break
+        case 'bigint':
+          result += (value as bigint).toString().length
+          break
+        case 'undefined':
+          result += 1
+          break
+        case 'null':
+          result += 1
+          break
+        default:
+          result += value.toString().length
+      }
     }
     return result
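A self-contained version of the estimator for experimentation. getTypeOf comes from the platform core, so a simplified typeOf stand-in is used here; it omits some of the cases above, such as symbol and bigint:

function typeOf (value: any): string {
  if (value === null) return 'null'
  if (Array.isArray(value)) return 'Array'
  if (value instanceof Date) return 'Date'
  if (typeof value === 'object') return 'Object'
  return typeof value // 'string' | 'number' | 'boolean' | 'undefined' | ...
}

function calcSize (obj: any): number {
  if (typeof obj === 'undefined') return 0
  let result = 0
  for (const key in obj) {
    const value = obj[key]
    result += key.length // keys count toward the estimate
    switch (typeOf(value)) {
      case 'Array':
        result += 4 + calcSize(value)
        break
      case 'Object':
        result += calcSize(value)
        break
      case 'Date':
        result += 24 // fixed guess, cheaper than stringifying
        break
      case 'string':
        result += (value as string).length
        break
      case 'number':
        result += 8
        break
      case 'boolean':
        result += 1
        break
      default:
        result += String(value).length
    }
  }
  return result
}

console.log(calcSize({ title: 'hello', count: 2, tags: ['a', 'b'] }))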
@@ -802,13 +828,21 @@ abstract class MongoAdapterBase implements DbAdapter {
     while (ops.length > 0) {
       const part = ops.splice(0, 500)
       await coll.bulkWrite(
-        part.map((it) => ({
+        part.map((it) => {
+          const digest: string | null = (it as any)['%hash%']
+          if ('%hash%' in it) {
+            delete it['%hash%']
+          }
+          const size = this.calcSize(it)
+
+          return {
             replaceOne: {
               filter: { _id: it._id },
-              replacement: { ...it, '%hash%': null },
+              replacement: { ...it, '%hash%': digest == null ? null : `${digest}|${size.toString(16)}` },
               upsert: true
             }
-        }))
+          }
+        })
       )
     }
   }
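The bulk upload path now preserves an incoming '%hash%' (as set by restore above) instead of always writing null. A sketch of the transformation each document goes through before bulkWrite, with simplified types and a stubbed size estimator:

interface DocLike { _id: string, [key: string]: any }

// Stand-in for this.calcSize above; any size estimator works here.
const calcSize = (obj: any): number => JSON.stringify(obj).length

function toReplaceOne (it: DocLike): { replaceOne: { filter: { _id: string }, replacement: DocLike, upsert: boolean } } {
  const digest: string | null = it['%hash%'] ?? null
  if ('%hash%' in it) {
    delete it['%hash%'] // size is estimated without the marker itself
  }
  const size = calcSize(it)
  return {
    replaceOne: {
      filter: { _id: it._id },
      // Missing digest -> null, forcing regeneration on the next backup pass.
      replacement: { ...it, '%hash%': digest == null ? null : `${digest}|${size.toString(16)}` },
      upsert: true
    }
  }
}

console.log(toReplaceOne({ _id: 'doc-1', title: 'hello', '%hash%': 'h4sh' }))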
@@ -221,7 +221,11 @@ export async function upgradeModel (

   logger.log(`${workspaceId.name}: Apply upgrade operations`)

-  const connection = await connect(transactorUrl, workspaceId, undefined, { mode: 'backup', model: 'upgrade' })
+  const connection = await connect(transactorUrl, workspaceId, undefined, {
+    mode: 'backup',
+    model: 'upgrade',
+    admin: 'true'
+  })

   // Create update indexes
   await createUpdateIndexes(connection, db, logger)
@@ -212,17 +212,13 @@ class TSessionManager implements SessionManager {
     return await baseCtx.with('📲 add-session', {}, async (ctx) => {
       const wsString = toWorkspaceString(token.workspace, '@')

-      const workspaceInfo =
-        accountsUrl !== ''
-          ? await this.getWorkspaceInfo(accountsUrl, rawToken)
-          : {
-              workspace: token.workspace.name,
-              workspaceUrl: token.workspace.name,
-              workspaceName: token.workspace.name
-            }
-      if (workspaceInfo === undefined) {
+      let workspaceInfo =
+        accountsUrl !== '' ? await this.getWorkspaceInfo(accountsUrl, rawToken) : this.wsFromToken(token)
+      if (workspaceInfo === undefined && token.extra?.admin !== 'true') {
         // No access to workspace for token.
         return { error: new Error(`No access to workspace for token ${token.email} ${token.workspace.name}`) }
+      } else {
+        workspaceInfo = this.wsFromToken(token)
       }

       let workspace = this.workspaces.get(wsString)
@@ -296,6 +292,18 @@ class TSessionManager implements SessionManager {
     })
   }

+  private wsFromToken (token: Token): {
+    workspace: string
+    workspaceUrl?: string | null
+    workspaceName?: string
+  } {
+    return {
+      workspace: token.workspace.name,
+      workspaceUrl: token.workspace.name,
+      workspaceName: token.workspace.name
+    }
+  }
+
   private async createUpgradeSession (
     token: Token,
     sessionId: string | undefined,
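The new private helper extracted in this hunk backs the admin fallback in the previous one: every field of the workspace info is derived from the raw workspace name carried in the token. A runnable extract with a simplified Token shape (the real Token type lives in the platform's server code):

interface TokenLike { workspace: { name: string } }

function wsFromToken (token: TokenLike): { workspace: string, workspaceUrl?: string | null, workspaceName?: string } {
  // All three fields fall back to the token's raw workspace name.
  return {
    workspace: token.workspace.name,
    workspaceUrl: token.workspace.name,
    workspaceName: token.workspace.name
  }
}

console.log(wsFromToken({ workspace: { name: 'demo-ws' } }))
// { workspace: 'demo-ws', workspaceUrl: 'demo-ws', workspaceName: 'demo-ws' }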