Mirror of https://github.com/hcengineering/platform.git (synced 2025-04-29 11:43:49 +00:00)

UBERF-9299: Fix backup service order and add parallel (#7846)

This commit is contained in:
parent 244ddd42af
commit 2ceadf28d8
.vscode/launch.json (vendored), 13 lines changed
@@ -55,7 +55,7 @@
       "MODEL_JSON": "${workspaceRoot}/models/all/bundle/model.json",
       // "SERVER_PROVIDER":"uweb"
       "SERVER_PROVIDER":"ws",
-      "MODEL_VERSION": "0.6.382",
+      "MODEL_VERSION": "0.6.421",
       // "VERSION": "0.6.289",
       "ELASTIC_INDEX_NAME": "local_storage_index",
       "UPLOAD_URL": "/files",
@@ -166,7 +166,7 @@
       "MINIO_ACCESS_KEY": "minioadmin",
       "MINIO_SECRET_KEY": "minioadmin",
       "MINIO_ENDPOINT": "localhost",
-      "MODEL_VERSION": "v0.6.382",
+      "MODEL_VERSION": "v0.6.421",
       "WS_OPERATION": "all+backup",
       "BACKUP_STORAGE": "minio|minio?accessKey=minioadmin&secretKey=minioadmin",
       "BACKUP_BUCKET": "dev-backups",
@@ -189,6 +189,7 @@
       // "DB_URL": "mongodb://localhost:27017",
       // "DB_URL": "postgresql://postgres:example@localhost:5432",
       "DB_URL": "postgresql://root@host.docker.internal:26257/defaultdb?sslmode=disable",
+      "FULLTEXT_URL": "http://host.docker.internal:4702",
       "REGION": "cockroach",
       "SERVER_SECRET": "secret",
       "TRANSACTOR_URL": "ws://localhost:3332",
@@ -198,7 +199,7 @@
       "MINIO_ACCESS_KEY": "minioadmin",
       "MINIO_SECRET_KEY": "minioadmin",
       "MINIO_ENDPOINT": "localhost",
-      "MODEL_VERSION": "v0.6.382",
+      "MODEL_VERSION": "0.6.421",
       "WS_OPERATION": "all+backup",
       "BACKUP_STORAGE": "minio|minio?accessKey=minioadmin&secretKey=minioadmin",
       "BACKUP_BUCKET": "dev-backups",
@@ -326,7 +327,7 @@
       "ACCOUNTS_URL": "http://localhost:3000",
       "TELEGRAM_DATABASE": "telegram-service",
       "REKONI_URL": "http://localhost:4004",
-      "MODEL_VERSION": "0.6.382"
+      "MODEL_VERSION": "0.6.421"
     },
     "runtimeVersion": "20",
     "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
@@ -353,7 +354,7 @@
       "MONGO_URL": "mongodb://localhost:27017",
       "TELEGRAM_DATABASE": "telegram-service",
       "REKONI_URL": "http://localhost:4004",
-      "MODEL_VERSION": "0.6.382"
+      "MODEL_VERSION": "0.6.421"
     },
     "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
     "sourceMaps": true,
@@ -397,7 +398,7 @@
       "SECRET": "secret",
       "REGION": "cockroach",
       "BUCKET_NAME":"backups",
-      "INTERVAL":"30"
+      "INTERVAL":"43200"
     },
     "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
     "showAsyncStacks": true,
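Note: INTERVAL is in seconds (the service compares `(now - lastBackup) / 1000` against it), so the dev launch profile moves from re-backing up every 30 s to every 12 h. The same 43200 figure appears further down as the `backupInterval` constant in the admin panel, keeping the two in step:

```ts
// 43200 s = 12 h: minimum age of the last backup before a workspace is eligible again.
const INTERVAL_SECONDS = 43200
console.assert(INTERVAL_SECONDS === 12 * 60 * 60)
```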
@@ -495,6 +495,7 @@ services:
       - MONGO_URL=${MONGO_URL}
       - ACCOUNTS_URL=http://host.docker.internal:3000
       - SUPPORT_WORKSPACE=support
+      - STORAGE_CONFIG=${STORAGE_CONFIG}
       - FIRST_NAME=Jolie
       - LAST_NAME=AI
       - PASSWORD=password

@@ -709,7 +709,7 @@ export function isActiveMode (mode?: WorkspaceMode): boolean {
   return mode === 'active'
 }
 export function isDeletingMode (mode: WorkspaceMode): boolean {
-  return mode === 'pending-deletion' || mode === 'deleting'
+  return mode === 'pending-deletion' || mode === 'deleting' || mode === 'deleted'
 }
 export function isArchivingMode (mode?: WorkspaceMode): boolean {
   return (
@@ -751,6 +751,8 @@ export type WorkspaceUpdateEvent =
   | 'archiving-clean-started'
   | 'archiving-clean-done'
   | 'archiving-done'
+  | 'delete-started'
+  | 'delete-done'

 export interface BackupStatus {
   dataSize: number

@@ -10,10 +10,11 @@
   type BaseWorkspaceInfo
 } from '@hcengineering/core'
 import { getEmbeddedLabel } from '@hcengineering/platform'
-import { isAdminUser } from '@hcengineering/presentation'
+import { isAdminUser, MessageBox } from '@hcengineering/presentation'
 import {
   Button,
   ButtonMenu,
+  CheckBox,
   Expandable,
   IconArrowRight,
   IconOpen,
@@ -23,12 +24,14 @@
   Popup,
   Scroller,
   SearchEdit,
-  ticker,
-  CheckBox
+  showPopup,
+  ticker
 } from '@hcengineering/ui'
 import { workbenchId } from '@hcengineering/workbench'
 import { getAllWorkspaces, getRegionInfo, performWorkspaceOperation, type RegionInfo } from '../utils'

+$: now = $ticker
+
 $: isAdmin = isAdminUser()

 let search: string = ''
@@ -42,16 +45,119 @@

 let workspaces: WorkspaceInfo[] = []

+enum SortingRule {
+  Name = '1',
+  BackupDate = '2',
+  BackupSize = '3',
+  LastVisit = '4'
+}
+
+let sortingRule = SortingRule.BackupDate
+
+const sortRules = {
+  [SortingRule.Name]: 'Name',
+  [SortingRule.BackupDate]: 'Backup date',
+  [SortingRule.BackupSize]: 'Backup size',
+  [SortingRule.LastVisit]: 'Last visit'
+}
+
 const updateWorkspaces = reduceCalls(async (_: number) => {
   const res = await getAllWorkspaces()
-  workspaces = res.sort((a, b) =>
-    (b.workspaceUrl ?? b.workspace).localeCompare(a.workspaceUrl ?? a.workspace)
-  ) as WorkspaceInfo[]
+  workspaces = res as WorkspaceInfo[]
 })

 $: void updateWorkspaces($ticker)

-const now = Date.now()
+$: sortedWorkspaces = workspaces
+  .filter(
+    (it) =>
+      ((it.workspaceName?.includes(search) ?? false) ||
+        (it.workspaceUrl?.includes(search) ?? false) ||
+        it.workspace?.includes(search) ||
+        it.createdBy?.includes(search)) &&
+      ((showActive && isActiveMode(it.mode)) ||
+        (showArchived && isArchivingMode(it.mode)) ||
+        (showDeleted && isDeletingMode(it.mode)) ||
+        (showOther && (isMigrationMode(it.mode) || isRestoringMode(it.mode))))
+  )
+  .sort((a, b) => {
+    switch (sortingRule) {
+      case SortingRule.BackupDate: {
+        return (a.backupInfo?.lastBackup ?? 0) - (b.backupInfo?.lastBackup ?? 0)
+      }
+      case SortingRule.BackupSize:
+        return (b.backupInfo?.backupSize ?? 0) - (a.backupInfo?.backupSize ?? 0)
+      case SortingRule.LastVisit:
+        return (b.lastVisit ?? 0) - (a.lastVisit ?? 0)
+    }
+    return (b.workspaceUrl ?? b.workspace).localeCompare(a.workspaceUrl ?? a.workspace)
+  })
+
+let backupIdx = new Map<string, number>()
+
+const backupInterval: number = 43200
+
+let backupable: WorkspaceInfo[] = []
+
+$: {
+  // Assign backup idx
+  const backupSorting = [...sortedWorkspaces].filter((it) => {
+    if (!isActiveMode(it.mode)) {
+      return false
+    }
+    const lastBackup = it.backupInfo?.lastBackup ?? 0
+    if ((now - lastBackup) / 1000 < backupInterval) {
+      // No backup required, interval not elapsed
+      return false
+    }
+
+    const createdOn = Math.floor((now - it.createdOn) / 1000)
+    if (createdOn <= 2) {
+      // Skip if created less than 2 days ago
+      return false
+    }
+    if (it.lastVisit == null) {
+      return false
+    }
+
+    const lastVisitSec = Math.floor((now - it.lastVisit) / 1000)
+    if (lastVisitSec > backupInterval) {
+      // No backup required, interval not elapsed
+      return false
+    }
+    return true
+  })
+  const newBackupIdx = new Map<string, number>()
+
+  backupSorting.sort((a, b) => {
+    return (a.backupInfo?.lastBackup ?? 0) - (b.backupInfo?.lastBackup ?? 0)
+  })
+
+  // Shift new with existing ones.
+  const existingNew = groupByArray(backupSorting, (it) => it.backupInfo != null)
+
+  const existing = existingNew.get(true) ?? []
+  const newOnes = existingNew.get(false) ?? []
+  const mixedBackupSorting: WorkspaceInfo[] = []
+
+  while (existing.length > 0 || newOnes.length > 0) {
+    const e = existing.shift()
+    const n = newOnes.shift()
+    if (e != null) {
+      mixedBackupSorting.push(e)
+    }
+    if (n != null) {
+      mixedBackupSorting.push(n)
+    }
+  }
+
+  backupable = mixedBackupSorting
+
+  for (const [idx, it] of mixedBackupSorting.entries()) {
+    newBackupIdx.set(it.workspace, idx)
+  }
+  backupIdx = newBackupIdx
+}

 const dayRanges = {
   Today: 1,
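Note: both this admin panel and the backup service later in the diff sort candidates by last-backup time and then alternate previously backed-up workspaces with never-backed-up ones, so first-time backups start promptly without starving the regular rotation. A standalone sketch of that merge (the `groupByArray` plus `shift()` loop above, condensed; the `interleave` name is illustrative, not from the source):

```ts
// Round-robin merge: alternate items from two queues until both are empty.
// Mirrors the existing/new workspace interleaving in the code above.
function interleave<T> (existing: T[], fresh: T[]): T[] {
  const out: T[] = []
  while (existing.length > 0 || fresh.length > 0) {
    const e = existing.shift()
    const f = fresh.shift()
    if (e !== undefined) out.push(e)
    if (f !== undefined) out.push(f)
  }
  return out
}

// Example: backed-up workspaces a/b/c interleaved with never-backed-up x/y.
console.log(interleave(['a', 'b', 'c'], ['x', 'y'])) // [ 'a', 'x', 'b', 'y', 'c' ]
```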
@@ -72,32 +178,19 @@

 let showActive: boolean = true
 let showArchived: boolean = false
-let showDeleted: boolean = true
+let showDeleted: boolean = false
 let showOther: boolean = true

-$: groupped = groupByArray(
-  workspaces.filter(
-    (it) =>
-      ((it.workspaceName?.includes(search) ?? false) ||
-        (it.workspaceUrl?.includes(search) ?? false) ||
-        it.workspace?.includes(search) ||
-        it.createdBy?.includes(search)) &&
-      ((showActive && isActiveMode(it.mode)) ||
-        (showArchived && isArchivingMode(it.mode)) ||
-        (showDeleted && isDeletingMode(it.mode)) ||
-        (showOther && (isMigrationMode(it.mode) || isRestoringMode(it.mode))))
-  ),
-  (it) => {
-    const lastUsageDays = Math.round((now - it.lastVisit) / (1000 * 3600 * 24))
-    return Object.entries(dayRanges).find(([_k, v]) => lastUsageDays <= v)?.[0] ?? 'Other'
-  }
-)
+$: groupped = groupByArray(sortedWorkspaces, (it) => {
+  const lastUsageDays = Math.round((now - it.lastVisit) / (1000 * 3600 * 24))
+  return Object.entries(dayRanges).find(([_k, v]) => lastUsageDays <= v)?.[0] ?? 'Other'
+})

 let regionInfo: RegionInfo[] = []

 let selectedRegionId: string = ''
 void getRegionInfo().then((_regionInfo) => {
-  regionInfo = _regionInfo ?? []
+  regionInfo = _regionInfo?.filter((it) => it.name !== '') ?? []
   if (selectedRegionId === '' && regionInfo.length > 0) {
     selectedRegionId = regionInfo[0].region
   }
@@ -111,6 +204,8 @@
-  <div class="fs-title p-3">Workspaces administration panel</div>
+  <div class="fs-title p-3">
+    Workspaces: {workspaces.length} active: {workspaces.filter((it) => isActiveMode(it.mode)).length}
+    Backupable: {backupable.length} new: {backupable.reduce((p, it) => p + (it.backupInfo == null ? 1 : 0), 0)}
+  </div>
   <div class="fs-title p-3 flex-no-shrink">
     <SearchEdit bind:value={search} width={'100%'} />
@@ -136,6 +231,19 @@
     </div>
   </div>

+  <div class="fs-title p-3 flex-row-center">
+    <span class="mr-2"> Sorting order: {sortingRule} </span>
+    <ButtonMenu
+      selected={sortingRule}
+      autoSelectionIfOne
+      title={sortRules[sortingRule]}
+      items={Object.entries(sortRules).map((it) => ({ id: it[0], label: getEmbeddedLabel(it[1]) }))}
+      on:selected={(it) => {
+        sortingRule = it.detail
+      }}
+    />
+  </div>
+
   <div class="fs-title p-3 flex-row-center">
     <span class="mr-2"> Migration region selector: </span>
     <ButtonMenu
@@ -193,10 +301,16 @@
           label={getEmbeddedLabel(`Mass Archive ${archiveV.length}`)}
           kind={'ghost'}
           on:click={() => {
-            void performWorkspaceOperation(
-              archiveV.map((it) => it.workspace),
-              'archive'
-            )
+            showPopup(MessageBox, {
+              label: getEmbeddedLabel(`Mass Archive ${archiveV.length}`),
+              message: getEmbeddedLabel(`Please confirm archive ${archiveV.length} workspaces`),
+              action: async () => {
+                void performWorkspaceOperation(
+                  archiveV.map((it) => it.workspace),
+                  'archive'
+                )
+              }
+            })
           }}
         />
       {/if}
@@ -207,18 +321,25 @@
           kind={'positive'}
           label={getEmbeddedLabel(`Mass Migrate ${activeV.length} to ${selectedRegionName ?? ''}`)}
           on:click={() => {
-            void performWorkspaceOperation(
-              activeV.map((it) => it.workspace),
-              'migrate-to',
-              selectedRegionId
-            )
+            showPopup(MessageBox, {
+              label: getEmbeddedLabel(`Mass Migrate ${archiveV.length}`),
+              message: getEmbeddedLabel(`Please confirm migrate ${archiveV.length} workspaces`),
+              action: async () => {
+                await performWorkspaceOperation(
+                  activeV.map((it) => it.workspace),
+                  'migrate-to',
+                  selectedRegionId
+                )
+              }
+            })
           }}
         />
       {/if}
     </svelte:fragment>
     {#each v.slice(0, limit) as workspace}
       {@const wsName = workspace.workspaceName ?? workspace.workspace}
-      {@const lastUsageDays = Math.round((Date.now() - workspace.lastVisit) / (1000 * 3600 * 24))}
+      {@const lastUsageDays = Math.round((now - workspace.lastVisit) / (1000 * 3600 * 24))}
+      {@const bIdx = backupIdx.get(workspace.workspace)}
       <!-- svelte-ignore a11y-click-events-have-key-events -->
       <!-- svelte-ignore a11y-no-static-element-interactions -->
       <div class="flex fs-title cursor-pointer focused-button bordered">
@@ -272,6 +393,9 @@
               {Math.round(sz * 100) / 100}Mb
             {/if}
           {/if}
+          {#if bIdx != null}
+            [#{bIdx}]
+          {/if}
         </span>
         <span class="flex flex-between" style:width={'5rem'}>
           {#if workspace.backupInfo != null}
@@ -294,7 +418,13 @@
               label={getEmbeddedLabel('Archive')}
               kind={'ghost'}
               on:click={() => {
-                void performWorkspaceOperation(workspace.workspace, 'archive')
+                showPopup(MessageBox, {
+                  label: getEmbeddedLabel(`Archive ${workspace.workspaceUrl}`),
+                  message: getEmbeddedLabel('Please confirm'),
+                  action: async () => {
+                    await performWorkspaceOperation(workspace.workspace, 'archive')
+                  }
+                })
               }}
             />
           {/if}
@@ -306,7 +436,13 @@
               kind={'ghost'}
               label={getEmbeddedLabel('Unarchive')}
               on:click={() => {
-                void performWorkspaceOperation(workspace.workspace, 'unarchive')
+                showPopup(MessageBox, {
+                  label: getEmbeddedLabel(`Unarchive ${workspace.workspaceUrl}`),
+                  message: getEmbeddedLabel('Please confirm'),
+                  action: async () => {
+                    await performWorkspaceOperation(workspace.workspace, 'unarchive')
+                  }
+                })
               }}
             />
           {/if}
@@ -318,7 +454,31 @@
               kind={'positive'}
               label={getEmbeddedLabel('Migrate ' + (selectedRegionName ?? ''))}
               on:click={() => {
-                void performWorkspaceOperation(workspace.workspace, 'migrate-to', selectedRegionId)
+                showPopup(MessageBox, {
+                  label: getEmbeddedLabel(`Migrate ${workspace.workspaceUrl}`),
+                  message: getEmbeddedLabel('Please confirm'),
+                  action: async () => {
+                    await performWorkspaceOperation(workspace.workspace, 'migrate-to', selectedRegionId)
+                  }
+                })
               }}
             />
           {/if}
+
+          {#if !isDeletingMode(workspace.mode) && !isArchivingMode(workspace.mode)}
+            <Button
+              icon={IconStop}
+              size={'small'}
+              kind={'dangerous'}
+              label={getEmbeddedLabel('Delete')}
+              on:click={() => {
+                showPopup(MessageBox, {
+                  label: getEmbeddedLabel(`Delete ${workspace.workspaceUrl}`),
+                  message: getEmbeddedLabel('Please confirm'),
+                  action: async () => {
+                    await performWorkspaceOperation(workspace.workspace, 'delete')
+                  }
+                })
+              }}
+            />
+          {/if}

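Note: the same showPopup(MessageBox, ...) confirmation wrapper now guards every destructive action in this panel (mass archive, mass migrate, archive, unarchive, migrate, delete). A hypothetical helper that would factor out the repetition (confirmThen is not in the source; MessageBox, showPopup and getEmbeddedLabel are the imports used above):

```ts
import { getEmbeddedLabel } from '@hcengineering/platform'
import { MessageBox } from '@hcengineering/presentation'
import { showPopup } from '@hcengineering/ui'

// Hypothetical wrapper: show a confirmation dialog and run `action` only on confirm.
function confirmThen (label: string, message: string, action: () => Promise<void>): void {
  showPopup(MessageBox, {
    label: getEmbeddedLabel(label),
    message: getEmbeddedLabel(message),
    action
  })
}
```

Each on:click above would then reduce to, for example, confirmThen(`Archive ${workspace.workspaceUrl}`, 'Please confirm', async () => { await performWorkspaceOperation(workspace.workspace, 'archive') }).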
@@ -274,7 +274,7 @@ export async function getWorkspaces (): Promise<Workspace[]> {

 export async function performWorkspaceOperation (
   workspace: string | string[],
-  operation: 'archive' | 'migrate-to' | 'unarchive',
+  operation: 'archive' | 'migrate-to' | 'unarchive' | 'delete',
   ...params: any[]
 ): Promise<boolean> {
   const accountsUrl = getMetadata(login.metadata.AccountsUrl)

@@ -121,6 +121,7 @@
   }

   async function downloadFile (filename: string): Promise<void> {
+    const a = document.createElement('a')
     try {
       const response = await fetch(getFileUrl(filename), {
         headers: {
@@ -128,18 +129,17 @@
         }
       })
       if (!response.ok) throw new Error('Download failed')

       const blob = await response.blob()
       const url = window.URL.createObjectURL(blob)
-      const a = document.createElement('a')
       a.href = url
       a.download = filename
       document.body.appendChild(a)
       a.click()
       window.URL.revokeObjectURL(url)
-      document.body.removeChild(a)
     } catch (err) {
       console.error('Failed to download:', err)
+    } finally {
+      document.body.removeChild(a)
     }
   }
   let copied = false

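Note: this fix hoists the anchor element out of the try block so the finally clause can always reference it. Since Element.remove() is a no-op on a detached node, while document.body.removeChild(a) throws if `a` was never appended (e.g. when fetch fails early), a variant using remove() would be strictly safer (a sketch under that assumption, not the source):

```ts
// Sketch: same flow, but a.remove() never throws on a detached element.
async function downloadFileSafe (url: string, filename: string): Promise<void> {
  const a = document.createElement('a')
  try {
    const response = await fetch(url)
    if (!response.ok) throw new Error('Download failed')
    const blobUrl = window.URL.createObjectURL(await response.blob())
    a.href = blobUrl
    a.download = filename
    document.body.appendChild(a)
    a.click()
    window.URL.revokeObjectURL(blobUrl)
  } catch (err) {
    console.error('Failed to download:', err)
  } finally {
    a.remove() // safe whether or not appendChild was reached
  }
}
```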
@@ -174,12 +174,14 @@ class WorkspaceIndexer {
     return result
   }

-  async reindex (): Promise<void> {
+  async reindex (onlyDrop: boolean): Promise<void> {
     await this.fulltext.cancel()
-    await this.fulltext.clearIndex()
-    await this.fulltext.startIndexing(() => {
-      this.lastUpdate = Date.now()
-    })
+    await this.fulltext.clearIndex(onlyDrop)
+    if (!onlyDrop) {
+      await this.fulltext.startIndexing(() => {
+        this.lastUpdate = Date.now()
+      })
+    }
   }

   async close (): Promise<void> {
@@ -211,6 +213,7 @@ interface Search {

 interface Reindex {
   token: string
+  onlyDrop?: boolean
 }
 // Register close on process exit.
 process.on('exit', () => {
@@ -441,7 +444,7 @@ export async function startIndexer (
       const indexer = await getIndexer(ctx, decoded.workspace, request.token, true)
       if (indexer !== undefined) {
         indexer.lastUpdate = Date.now()
-        await indexer.reindex()
+        await indexer.reindex(request?.onlyDrop ?? false)
       }
     } catch (err: any) {
       Analytics.handleError(err)
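Note: the Reindex request now carries an optional onlyDrop flag; when true the indexer only drops the workspace index instead of rebuilding it. The workspace service calls this endpoint as shown in doReindexFulltext later in the diff; a minimal client sketch of that call (function name and parameters are illustrative):

```ts
// Ask the fulltext service to drop (onlyDrop: true) or rebuild (onlyDrop: false)
// the index of one workspace. Mirrors doReindexFulltext further down in this diff.
async function requestReindex (fulltextUrl: string, token: string, onlyDrop: boolean): Promise<void> {
  const res = await fetch(fulltextUrl + '/api/v1/reindex', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ token, onlyDrop })
  })
  if (!res.ok) {
    throw new Error(`HTTP Error ${res.status} ${res.statusText}`)
  }
}
```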
@@ -233,6 +233,8 @@ export class WorkspaceMongoDbCollection extends MongoDbCollection<Workspace> imp
       { mode: { $in: ['archiving-pending-backup', 'archiving-backup', 'archiving-pending-clean', 'archiving-clean'] } }
     ]

+    const deletingQuery: Filter<Workspace>['$or'] = [{ mode: { $in: ['pending-deletion', 'deleting'] } }]
+
     const restoreQuery: Filter<Workspace>['$or'] = [{ mode: { $in: ['pending-restore', 'restoring'] } }]

     const versionQuery = {
@@ -284,7 +286,14 @@ export class WorkspaceMongoDbCollection extends MongoDbCollection<Workspace> imp
         break
       case 'all+backup':
         operationQuery = {
-          $or: [...pendingCreationQuery, ...pendingUpgradeQuery, ...migrationQuery, ...archivingQuery, ...restoreQuery]
+          $or: [
+            ...pendingCreationQuery,
+            ...pendingUpgradeQuery,
+            ...migrationQuery,
+            ...archivingQuery,
+            ...restoreQuery,
+            ...deletingQuery
+          ]
         }
         break
     }

@@ -387,6 +387,7 @@ export class WorkspacePostgresDbCollection extends PostgresDbCollection<Workspac
       "mode IN ('migration-backup', 'migration-pending-backup', 'migration-clean', 'migration-pending-clean')"

     const restoringSql = "mode IN ('pending-restore', 'restoring')"
+    const deletingSql = "mode IN ('pending-deletion', 'deleting')"

     const archivingSql =
       "mode IN ('archiving-pending-backup', 'archiving-backup', 'archiving-pending-clean', 'archiving-clean')"
@@ -405,7 +406,7 @@ export class WorkspacePostgresDbCollection extends PostgresDbCollection<Workspac
         operationSql = `(${pendingCreationSql} OR ${pendingUpgradeSql})`
         break
       case 'all+backup':
-        operationSql = `(${pendingCreationSql} OR ${pendingUpgradeSql} OR ${migrationSql} OR ${archivingSql} OR ${restoringSql})`
+        operationSql = `(${pendingCreationSql} OR ${pendingUpgradeSql} OR ${migrationSql} OR ${archivingSql} OR ${restoringSql}) OR ${deletingSql}`
         break
     }

@@ -1271,6 +1271,14 @@ export async function updateWorkspaceInfo (
       update.mode = 'archived'
       update.progress = 100
       break
+    case 'delete-started':
+      update.mode = 'deleting'
+      update.progress = 0
+      break
+    case 'delete-done':
+      update.mode = 'deleted'
+      update.progress = 100
+      break
     case 'ping':
     default:
       break
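Note: taken together, the new pieces give deletion the same event-driven lifecycle as archiving: performWorkspaceOperation('delete') moves an active workspace into 'pending-deletion', the workspace worker picks it up through the extended 'all+backup' query, runs the cleanup, and reports 'delete-started'/'delete-done', which this switch maps onto modes. Condensed (a restatement of the diff, not new behavior):

```ts
// Workspace delete lifecycle as wired up by this commit:
//   event                resulting mode       progress
//   'delete' requested   'pending-deletion'   0
//   'delete-started'     'deleting'           0
//   'delete-done'        'deleted'            100
// isDeletingMode() now returns true for all three modes.
```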
@@ -1297,7 +1305,7 @@ export async function performWorkspaceOperation (
   branding: Branding | null,
   token: string,
   workspaceId: string | string[],
-  event: 'archive' | 'migrate-to' | 'unarchive',
+  event: 'archive' | 'migrate-to' | 'unarchive' | 'delete',
   ...params: any
 ): Promise<boolean> {
   const decodedToken = decodeToken(ctx, token)
@@ -1320,6 +1328,16 @@ export async function performWorkspaceOperation (
     for (const workspaceInfo of workspaceInfos) {
       const update: Partial<WorkspaceInfo> = {}
       switch (event) {
+        case 'delete':
+          if (workspaceInfo.mode !== 'active') {
+            throw new PlatformError(unknownError('Archive allowed only for active workspaces'))
+          }
+
+          update.mode = 'pending-deletion'
+          update.attempts = 0
+          update.progress = 0
+          update.lastProcessingTime = Date.now() - processingTimeoutMs // To not wait for next step
+          break
         case 'archive':
           if (workspaceInfo.mode !== 'active') {
             throw new PlatformError(unknownError('Archive allowed only for active workspaces'))

@@ -46,7 +46,8 @@ const envMap: { [key in keyof Config]: string } = {
   SkipWorkspaces: 'SKIP_WORKSPACES',
   Storage: 'STORAGE',
   WorkspaceStorage: 'WORKSPACE_STORAGE',
-  Region: 'REGION'
+  Region: 'REGION',
+  Parallel: 'PARALLEL'
 }

 const required: Array<keyof Config> = [
@@ -72,7 +73,8 @@ export const config: () => Config = () => {
     SkipWorkspaces: process.env[envMap.SkipWorkspaces] ?? '',
     WorkspaceStorage: process.env[envMap.WorkspaceStorage],
     Storage: process.env[envMap.Storage],
-    Region: process.env[envMap.Region] ?? ''
+    Region: process.env[envMap.Region] ?? '',
+    Parallel: parseInt(process.env[envMap.Parallel] ?? '1')
   }

   const missingEnv = required.filter((key) => params[key] === undefined).map((key) => envMap[key])

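Note: the new Parallel option is read from the PARALLEL environment variable by the backup service here, and the same variable is already consumed by serveWorkspaceAccount below. Unset, it defaults to 1, which preserves the old sequential behavior:

```ts
// Assumed deployment wiring: PARALLEL=4 would back up four workspaces at once.
const parallel = parseInt(process.env.PARALLEL ?? '1')
console.log(parallel) // 1 when PARALLEL is not set, i.e. the pre-commit behavior
```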
@@ -18,9 +18,11 @@ import core, {
   BaseWorkspaceInfo,
   DOMAIN_TX,
   getWorkspaceId,
+  groupByArray,
   Hierarchy,
   isActiveMode,
   ModelDb,
+  RateLimiter,
   SortingOrder,
   systemAccountEmail,
   type BackupStatus,
@@ -40,6 +42,7 @@ import {
 import { generateToken } from '@hcengineering/server-token'
 import { backup, restore } from '.'
 import { createStorageBackupStorage } from './storage'
+import { clearInterval } from 'node:timers'
 export interface BackupConfig {
   AccountsURL: string
   Token: string
@@ -50,6 +53,8 @@ export interface BackupConfig {
   Timeout: number // Timeout in seconds
   BucketName: string
   SkipWorkspaces: string
+
+  Parallel: number
 }

 class BackupWorker {
@@ -118,16 +123,23 @@ class BackupWorker {
     const workspacesIgnore = new Set(this.config.SkipWorkspaces.split(';'))
     ctx.info('skipped workspaces', { workspacesIgnore })
     let skipped = 0
+    const now = Date.now()
     const allWorkspaces = await listAccountWorkspaces(this.config.Token, this.region)
-    const workspaces = allWorkspaces.filter((it) => {
+    let workspaces = allWorkspaces.filter((it) => {
       if (!isActiveMode(it.mode)) {
         // We should backup only active workspaces
         skipped++
         return false
       }

+      const createdOn = Math.floor((now - it.createdOn) / 1000)
+      if (createdOn <= 2) {
+        // Skip if created less than 2 days ago
+        return false
+      }
+
       const lastBackup = it.backupInfo?.lastBackup ?? 0
-      if ((Date.now() - lastBackup) / 1000 < this.config.Interval) {
+      if ((now - lastBackup) / 1000 < this.config.Interval) {
         // No backup required, interval not elapsed
         skipped++
         return false
@@ -138,7 +150,7 @@ class BackupWorker {
         return false
       }

-      const lastVisitSec = Math.floor((Date.now() - it.lastVisit) / 1000)
+      const lastVisitSec = Math.floor((now - it.lastVisit) / 1000)
       if (lastVisitSec > this.config.Interval) {
         // No backup required, interval not elapsed
         skipped++
@@ -146,15 +158,31 @@ class BackupWorker {
       }
       return !workspacesIgnore.has(it.workspace)
     })

     workspaces.sort((a, b) => {
-      const lastBackupMin = Math.round(((a.backupInfo?.lastBackup ?? 0) - (b.backupInfo?.lastBackup ?? 0)) / 60)
-      if (lastBackupMin === 0) {
-        // Same minute, sort by backup size
-        return (a.backupInfo?.backupSize ?? 0) - (b.backupInfo?.backupSize ?? 0)
-      }
-      return lastBackupMin
+      return (a.backupInfo?.lastBackup ?? 0) - (b.backupInfo?.lastBackup ?? 0)
     })

+    // Shift new with existing ones.
+    const existingNew = groupByArray(workspaces, (it) => it.backupInfo != null)
+
+    const existing = existingNew.get(true) ?? []
+    const newOnes = existingNew.get(false) ?? []
+    const mixedBackupSorting: BaseWorkspaceInfo[] = []
+
+    while (existing.length > 0 || newOnes.length > 0) {
+      const e = existing.shift()
+      const n = newOnes.shift()
+      if (e != null) {
+        mixedBackupSorting.push(e)
+      }
+      if (n != null) {
+        mixedBackupSorting.push(n)
+      }
+    }
+
+    workspaces = mixedBackupSorting
+
     ctx.warn('Preparing for BACKUP', {
       total: workspaces.length,
       skipped,
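Note: the filter above encodes the worker's skip rules; restated as a single predicate for readability (a condensation, not source code). One caveat: the age check divides by 1000, i.e. it compares seconds, while its comment speaks of two days, so as written it only skips workspaces younger than two seconds:

```ts
import { isActiveMode, type BaseWorkspaceInfo } from '@hcengineering/core'

// True if a workspace should be considered for backup in this pass.
function needsBackup (it: BaseWorkspaceInfo, now: number, intervalSec: number, ignore: Set<string>): boolean {
  if (!isActiveMode(it.mode)) return false // only active workspaces are backed up
  if (Math.floor((now - it.createdOn) / 1000) <= 2) return false // freshly created, skip
  const lastBackup = it.backupInfo?.lastBackup ?? 0
  if ((now - lastBackup) / 1000 < intervalSec) return false // backed up recently enough
  if (it.lastVisit == null) return false
  if (Math.floor((now - it.lastVisit) / 1000) > intervalSec) return false // idle workspace, skip
  return !ignore.has(it.workspace)
}
```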
@@ -168,134 +196,157 @@ class BackupWorker {
         idx: ++idx,
         workspace: ws.workspaceUrl ?? ws.workspace,
         backupSize: ws.backupInfo?.backupSize ?? 0,
-        lastBackupSec: (Date.now() - (ws.backupInfo?.lastBackup ?? 0)) / 1000
+        lastBackupSec: (now - (ws.backupInfo?.lastBackup ?? 0)) / 1000
       })
     }

-    return await this.doBackup(ctx, workspaces, recheckTimeout)
-  }
-
-  async doBackup (
-    rootCtx: MeasureContext,
-    workspaces: BaseWorkspaceInfo[],
-    recheckTimeout: number,
-    notify?: (progress: number) => Promise<void>
-  ): Promise<{ failedWorkspaces: BaseWorkspaceInfo[], processed: number, skipped: number }> {
     let index = 0

     const failedWorkspaces: BaseWorkspaceInfo[] = []
     let processed = 0
     const startTime = Date.now()
-    for (const ws of workspaces) {
-      if (this.canceled || Date.now() - startTime > recheckTimeout) {
-        return { failedWorkspaces, processed, skipped: workspaces.length - processed }
-      }
-      index++
-      const st = Date.now()
-      rootCtx.warn('\n\nBACKUP WORKSPACE ', {
-        workspace: ws.workspace,
-        index,
-        total: workspaces.length
-      })
-      const ctx = rootCtx.newChild(ws.workspace, { workspace: ws.workspace })
-      let pipeline: Pipeline | undefined
-      try {
-        const storage = await createStorageBackupStorage(
-          ctx,
-          this.storageAdapter,
-          getWorkspaceId(this.config.BucketName),
-          ws.workspace
-        )
-        const wsUrl: WorkspaceIdWithUrl = {
-          name: ws.workspace,
-          uuid: ws.uuid,
-          workspaceName: ws.workspaceName ?? '',
-          workspaceUrl: ws.workspaceUrl ?? ''
-        }
-        const result = await ctx.with('backup', { workspace: ws.workspace }, (ctx) =>
-          backup(ctx, '', getWorkspaceId(ws.workspace), storage, {
-            skipDomains: this.skipDomains,
-            force: true,
-            freshBackup: this.freshWorkspace,
-            clean: this.clean,
-            timeout: this.config.Timeout * 1000,
-            connectTimeout: 5 * 60 * 1000, // 5 minutes to,
-            blobDownloadLimit: this.downloadLimit,
-            skipBlobContentTypes: [],
-            storageAdapter: this.workspaceStorageAdapter,
-            getLastTx: async (): Promise<Tx | undefined> => {
-              const config = this.getConfig(ctx, wsUrl, null, this.workspaceStorageAdapter)
-              const adapterConf = config.adapters[config.domains[DOMAIN_TX]]
-              const hierarchy = new Hierarchy()
-              const modelDb = new ModelDb(hierarchy)
-              const txAdapter = await adapterConf.factory(
-                ctx,
-                this.contextVars,
-                hierarchy,
-                adapterConf.url,
-                wsUrl,
-                modelDb,
-                this.workspaceStorageAdapter
-              )
-              try {
-                await txAdapter.init?.(ctx, this.contextVars)
-
-                return (
-                  await txAdapter.rawFindAll<Tx>(
-                    DOMAIN_TX,
-                    { objectSpace: { $ne: core.space.Model } },
-                    { limit: 1, sort: { modifiedOn: SortingOrder.Descending } }
-                  )
-                ).shift()
-              } finally {
-                await txAdapter.close()
-              }
-            },
-            getConnection: async () => {
-              if (pipeline === undefined) {
-                pipeline = await this.pipelineFactory(ctx, wsUrl, true, () => {}, null)
-              }
-              return wrapPipeline(ctx, pipeline, wsUrl)
-            },
-            progress: (progress) => {
-              return notify?.(progress) ?? Promise.resolve()
-            }
-          })
-        )
-
-        if (result.result) {
-          const backupInfo: BackupStatus = {
-            backups: (ws.backupInfo?.backups ?? 0) + 1,
-            lastBackup: Date.now(),
-            backupSize: Math.round((result.backupSize * 100) / (1024 * 1024)) / 100,
-            dataSize: Math.round((result.dataSize * 100) / (1024 * 1024)) / 100,
-            blobsSize: Math.round((result.blobsSize * 100) / (1024 * 1024)) / 100
-          }
-          rootCtx.warn('BACKUP STATS', {
-            workspace: ws.workspace,
-            workspaceUrl: ws.workspaceUrl,
-            workspaceName: ws.workspaceName,
-            index,
-            ...backupInfo,
-            time: Math.round((Date.now() - st) / 1000),
-            total: workspaces.length
-          })
-          // We need to report update for stats to account service
-          processed += 1
-
-          const token = generateToken(systemAccountEmail, { name: ws.workspace }, { service: 'backup' })
-          await updateBackupInfo(token, backupInfo)
-        }
-      } catch (err: any) {
-        rootCtx.error('\n\nFAILED to BACKUP', { workspace: ws.workspace, err })
-        failedWorkspaces.push(ws)
-      } finally {
-        if (pipeline !== undefined) {
-          await pipeline.close()
-        }
-      }
-    }
-    return { failedWorkspaces, processed, skipped: workspaces.length - processed }
+
+    const rateLimiter = new RateLimiter(this.config.Parallel)
+
+    const times: number[] = []
+
+    const infoTo = setInterval(() => {
+      const avgTime = times.length > 0 ? Math.round(times.reduce((p, c) => p + c, 0) / times.length) / 1000 : 0
+      ctx.warn('********** backup info **********', {
+        processed,
+        toGo: workspaces.length - processed,
+        avgTime,
+        index,
+        Elapsed: (Date.now() - startTime) / 1000,
+        ETA: Math.round((workspaces.length - processed) * avgTime)
+      })
+    }, 10000)
+    for (const ws of workspaces) {
+      await rateLimiter.add(async () => {
+        index++
+        if (this.canceled || Date.now() - startTime > recheckTimeout) {
+          return // If canceled, we should stop
+        }
+        const st = Date.now()
+        const result = await this.doBackup(ctx, ws)
+        const totalTime = Date.now() - st
+        times.push(totalTime)
+        if (!result) {
+          failedWorkspaces.push(ws)
+          return
+        }
+        processed++
+      })
+    }
+
+    await rateLimiter.waitProcessing()
+    clearInterval(infoTo)
+    return { failedWorkspaces, processed, skipped: workspaces.length - processed }
+  }
+
+  async doBackup (
+    rootCtx: MeasureContext,
+    ws: BaseWorkspaceInfo,
+    notify?: (progress: number) => Promise<void>
+  ): Promise<boolean> {
+    const st = Date.now()
+    rootCtx.warn('\n\nBACKUP WORKSPACE ', {
+      workspace: ws.workspace
+    })
+    const ctx = rootCtx.newChild(ws.workspace, { workspace: ws.workspace })
+    let pipeline: Pipeline | undefined
+    try {
+      const storage = await createStorageBackupStorage(
+        ctx,
+        this.storageAdapter,
+        getWorkspaceId(this.config.BucketName),
+        ws.workspace
+      )
+      const wsUrl: WorkspaceIdWithUrl = {
+        name: ws.workspace,
+        uuid: ws.uuid,
+        workspaceName: ws.workspaceName ?? '',
+        workspaceUrl: ws.workspaceUrl ?? ''
+      }
+      const result = await ctx.with('backup', { workspace: ws.workspace }, (ctx) =>
+        backup(ctx, '', getWorkspaceId(ws.workspace), storage, {
+          skipDomains: this.skipDomains,
+          force: true,
+          freshBackup: this.freshWorkspace,
+          clean: this.clean,
+          timeout: this.config.Timeout * 1000,
+          connectTimeout: 5 * 60 * 1000, // 5 minutes to,
+          blobDownloadLimit: this.downloadLimit,
+          skipBlobContentTypes: [],
+          storageAdapter: this.workspaceStorageAdapter,
+          getLastTx: async (): Promise<Tx | undefined> => {
+            const config = this.getConfig(ctx, wsUrl, null, this.workspaceStorageAdapter)
+            const adapterConf = config.adapters[config.domains[DOMAIN_TX]]
+            const hierarchy = new Hierarchy()
+            const modelDb = new ModelDb(hierarchy)
+            const txAdapter = await adapterConf.factory(
+              ctx,
+              this.contextVars,
+              hierarchy,
+              adapterConf.url,
+              wsUrl,
+              modelDb,
+              this.workspaceStorageAdapter
+            )
+            try {
+              await txAdapter.init?.(ctx, this.contextVars)
+
+              return (
+                await txAdapter.rawFindAll<Tx>(
+                  DOMAIN_TX,
+                  { objectSpace: { $ne: core.space.Model } },
+                  { limit: 1, sort: { modifiedOn: SortingOrder.Descending } }
+                )
+              ).shift()
+            } finally {
+              await txAdapter.close()
+            }
+          },
+          getConnection: async () => {
+            if (pipeline === undefined) {
+              pipeline = await this.pipelineFactory(ctx, wsUrl, true, () => {}, null)
+            }
+            return wrapPipeline(ctx, pipeline, wsUrl)
+          },
+          progress: (progress) => {
+            return notify?.(progress) ?? Promise.resolve()
+          }
+        })
+      )
+
+      if (result.result) {
+        const backupInfo: BackupStatus = {
+          backups: (ws.backupInfo?.backups ?? 0) + 1,
+          lastBackup: Date.now(),
+          backupSize: Math.round((result.backupSize * 100) / (1024 * 1024)) / 100,
+          dataSize: Math.round((result.dataSize * 100) / (1024 * 1024)) / 100,
+          blobsSize: Math.round((result.blobsSize * 100) / (1024 * 1024)) / 100
+        }
+        rootCtx.warn('BACKUP STATS', {
+          workspace: ws.workspace,
+          workspaceUrl: ws.workspaceUrl,
+          workspaceName: ws.workspaceName,
+          ...backupInfo,
+          time: Math.round((Date.now() - st) / 1000)
+        })
+        // We need to report update for stats to account service
+        const token = generateToken(systemAccountEmail, { name: ws.workspace }, { service: 'backup' })
+        await updateBackupInfo(token, backupInfo)
+      }
+    } catch (err: any) {
+      rootCtx.error('\n\nFAILED to BACKUP', { workspace: ws.workspace, err })
+      return false
+    } finally {
+      if (pipeline !== undefined) {
+        await pipeline.close()
+      }
+    }
+    return true
   }
 }

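Note: the parallelism itself comes from core's RateLimiter, exactly as imported above. Judging by how the loop uses it, add() schedules a task and waits once the configured number of tasks is in flight, and waitProcessing() awaits the stragglers, so Parallel: 1 degrades to the old sequential behavior. The pattern in isolation (a sketch under those assumptions; the per-workspace body is stubbed):

```ts
import { RateLimiter } from '@hcengineering/core'

// Run backups with at most `parallel` workspaces in flight at any moment.
async function backupAll (workspaces: string[], parallel: number): Promise<void> {
  const limiter = new RateLimiter(parallel)
  for (const ws of workspaces) {
    // add() resolves immediately while capacity remains, otherwise it waits
    // for a slot, which throttles the loop itself.
    await limiter.add(async () => {
      await backupOne(ws) // stand-in for the real per-workspace doBackup
    })
  }
  await limiter.waitProcessing() // wait for the tail of in-flight backups
}

async function backupOne (ws: string): Promise<void> {
  console.log('backing up', ws)
}
```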
@@ -367,9 +418,9 @@ export async function doBackupWorkspace (
     skipDomains
   )
   backupWorker.downloadLimit = downloadLimit
-  const { processed } = await backupWorker.doBackup(ctx, [workspace], Number.MAX_VALUE, notify)
+  const result = await backupWorker.doBackup(ctx, workspace, notify)
   await backupWorker.close()
-  return processed === 1
+  return result
 }

 export async function doRestoreWorkspace (

@@ -283,17 +283,21 @@ export class FullTextIndexPipeline implements FullTextPipeline {
     }
   }

-  async clearIndex (): Promise<void> {
-    const ctx = this.metrics
-    const migrations = await this.storage.findAll<MigrationState>(ctx, core.class.MigrationState, {
-      plugin: coreId,
-      state: {
-        $in: ['verify-indexes-v2', 'full-text-indexer-v4', 'full-text-structure-v4']
-      }
-    })
+  async clearIndex (onlyDrop = false): Promise<void> {
+    if (!onlyDrop) {
+      const ctx = this.metrics
+      const migrations = await this.storage.findAll<MigrationState>(ctx, core.class.MigrationState, {
+        plugin: coreId,
+        state: {
+          $in: ['verify-indexes-v2', 'full-text-indexer-v4', 'full-text-structure-v4']
+        }
+      })

-    const refs = migrations.map((it) => it._id)
-    await this.storage.clean(ctx, DOMAIN_MIGRATION, refs)
+      const refs = migrations.map((it) => it._id)
+      await this.storage.clean(ctx, DOMAIN_MIGRATION, refs)
+    } else {
+      await this.fulltextAdapter.clean(this.metrics, this.workspace)
+    }
   }

   broadcastClasses = new Set<Ref<Class<Doc>>>()

@@ -27,6 +27,9 @@ export function createPostgreeDestroyAdapter (url: string): WorkspaceDestroyAdap
     deleteWorkspace: async (ctx, contextVars, workspace): Promise<void> => {
       const client = getDBClient(contextVars, url)
       try {
+        if (workspace.uuid == null) {
+          throw new Error('Workspace uuid is not defined')
+        }
         const connection = await client.getClient()

         await ctx.with('delete-workspace', {}, async () => {
@@ -34,15 +37,14 @@ export function createPostgreeDestroyAdapter (url: string): WorkspaceDestroyAdap
           for (const [domain] of Object.entries(domainSchemas)) {
             await ctx.with('delete-workspace-domain', {}, async () => {
               await retryTxn(connection, async (client) => {
-                await client.unsafe(`delete from ${domain} where "workspaceId" = $1::uuid`, [
-                  workspace.uuid ?? workspace.name
-                ])
+                await client.unsafe(`delete from ${domain} where "workspaceId" = $1::uuid`, [workspace.uuid as string])
               })
             })
           }
         })
       } catch (err: any) {
         ctx.error('failed to clean workspace data', { err })
         throw err
       } finally {
         client.close()
       }

@@ -110,6 +110,11 @@ export function serveWorkspaceAccount (
   setMetadata(serverClientPlugin.metadata.UserAgent, 'WorkspaceService')
   setMetadata(serverNotification.metadata.InboxOnlyNotifications, true)

+  const fulltextUrl = process.env.FULLTEXT_URL
+  if (fulltextUrl === undefined) {
+    console.log('Please provide fulltext url to be able to clean fulltext index')
+  }
+
   let canceled = false

   const worker = new WorkspaceWorker(
@@ -119,7 +124,8 @@ export function serveWorkspaceAccount (
     region,
     parseInt(process.env.PARALLEL ?? '1'),
     wsOperation,
-    brandings
+    brandings,
+    fulltextUrl
   )

   void worker

@@ -106,7 +106,8 @@ export class WorkspaceWorker {
     readonly region: string,
     readonly limit: number,
     readonly operation: WorkspaceOperation,
-    readonly brandings: BrandingMap
+    readonly brandings: BrandingMap,
+    readonly fulltextUrl: string | undefined
   ) {}

   hasAvailableThread (): boolean {
@@ -366,10 +367,36 @@ export class WorkspaceWorker {
     }
   }

-  async doCleanup (ctx: MeasureContext, workspace: BaseWorkspaceInfo): Promise<void> {
+  /**
+   * If onlyDrop is true, the workspace is dropped from the database; otherwise only indexes are removed and a full reindex is performed.
+   */
+  async doCleanup (ctx: MeasureContext, workspace: BaseWorkspaceInfo, onlyDrop: boolean): Promise<void> {
     const { dbUrl } = prepareTools([])
     const adapter = getWorkspaceDestroyAdapter(dbUrl)
-    await adapter.deleteWorkspace(ctx, sharedPipelineContextVars, { name: workspace.workspace })
+    await adapter.deleteWorkspace(ctx, sharedPipelineContextVars, { name: workspace.workspace, uuid: workspace.uuid })
+
+    await this.doReindexFulltext(ctx, workspace, onlyDrop)
+  }
+
+  private async doReindexFulltext (ctx: MeasureContext, workspace: BaseWorkspaceInfo, onlyDrop: boolean): Promise<void> {
+    if (this.fulltextUrl !== undefined) {
+      const token = generateToken(systemAccountEmail, { name: workspace.workspace }, { service: 'workspace' })
+
+      try {
+        const res = await fetch(this.fulltextUrl + '/api/v1/reindex', {
+          method: 'PUT',
+          headers: {
+            'Content-Type': 'application/json'
+          },
+          body: JSON.stringify({ token, onlyDrop })
+        })
+        if (!res.ok) {
+          throw new Error(`HTTP Error ${res.status} ${res.statusText}`)
+        }
+      } catch (err: any) {
+        ctx.error('failed to reset index', { err })
+      }
+    }
+  }

   private async doWorkspaceOperation (
@@ -410,7 +437,7 @@ export class WorkspaceWorker {
         // We should remove DB, not storages.
         await sendEvent('archiving-clean-started', 0)
         try {
-          await this.doCleanup(ctx, workspace)
+          await this.doCleanup(ctx, workspace, false)
         } catch (err: any) {
           Analytics.handleError(err)
           return
@@ -418,6 +445,19 @@ export class WorkspaceWorker {
         await sendEvent('archiving-clean-done', 100)
         break
       }
+      case 'pending-deletion':
+      case 'deleting': {
+        // We should remove DB, not storages.
+        await sendEvent('delete-started', 0)
+        try {
+          await this.doCleanup(ctx, workspace, true)
+        } catch (err: any) {
+          Analytics.handleError(err)
+          return
+        }
+        await sendEvent('delete-done', 100)
+        break
+      }

       case 'migration-pending-backup':
       case 'migration-backup':
@@ -431,7 +471,7 @@ export class WorkspaceWorker {
         // We should remove DB, not storages.
         await sendEvent('migrate-clean-started', 0)
         try {
-          await this.doCleanup(ctx, workspace)
+          await this.doCleanup(ctx, workspace, false)
         } catch (err: any) {
           Analytics.handleError(err)
           return
@@ -443,13 +483,11 @@ export class WorkspaceWorker {
       case 'restoring':
         await sendEvent('restore-started', 0)
         if (await this.doRestore(ctx, workspace, opt)) {
+          // We should reindex fulltext
+          await this.doReindexFulltext(ctx, workspace, false)
           await sendEvent('restore-done', 100)
         }
         break
-      case 'deleting':
-        // Seems we failed to delete, so let's restore deletion.
-        // TODO: move from account
-        break
       default:
         ctx.error('Unknown workspace mode', { workspace: workspace.workspace, mode: workspace.mode })
     }
@@ -506,7 +544,8 @@ export class WorkspaceWorker {
           Timeout: 0,
           SkipWorkspaces: '',
           AccountsURL: '',
-          Interval: 0
+          Interval: 0,
+          Parallel: 1
         },
         pipelineFactory,
         workspaceStorageAdapter,

@@ -110,7 +110,7 @@ export async function createWorkspace (
     storageAdapter,
     ctxModellogger,
     async (event, version, value) => {
-      ctx.info('Init script progress', { event, value })
+      ctx.info('upgrade workspace', { event, value })
       await handleWsEvent?.('progress', version, 80 + Math.round((Math.min(value, 100) / 100) * 20))
     },
     false,