QFIX: Hash update (#7946)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
Andrey Sobolev 2025-02-06 13:42:49 +07:00 committed by GitHub
parent 77f27a289a
commit a531ba34db
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 40 additions and 109 deletions

10
.vscode/launch.json vendored
View File

@@ -55,7 +55,7 @@
"MODEL_JSON": "${workspaceRoot}/models/all/bundle/model.json",
// "SERVER_PROVIDER":"uweb"
"SERVER_PROVIDER":"ws",
"MODEL_VERSION": "0.6.427",
"MODEL_VERSION": "0.6.431",
// "VERSION": "0.6.289",
"ELASTIC_INDEX_NAME": "local_storage_index",
"UPLOAD_URL": "/files",
@@ -167,7 +167,7 @@
"MINIO_ACCESS_KEY": "minioadmin",
"MINIO_SECRET_KEY": "minioadmin",
"MINIO_ENDPOINT": "localhost",
"MODEL_VERSION": "v0.6.427",
"MODEL_VERSION": "v0.6.431",
"WS_OPERATION": "all+backup",
"BACKUP_STORAGE": "minio|minio?accessKey=minioadmin&secretKey=minioadmin",
"BACKUP_BUCKET": "dev-backups",
@@ -200,7 +200,7 @@
"MINIO_ACCESS_KEY": "minioadmin",
"MINIO_SECRET_KEY": "minioadmin",
"MINIO_ENDPOINT": "localhost",
"MODEL_VERSION": "0.6.427",
"MODEL_VERSION": "0.6.431",
"WS_OPERATION": "all+backup",
"BACKUP_STORAGE": "minio|minio?accessKey=minioadmin&secretKey=minioadmin",
"BACKUP_BUCKET": "dev-backups",
@@ -332,7 +332,7 @@
"ACCOUNT_DB_URL": "mongodb://localhost:27017",
"TELEGRAM_DATABASE": "telegram-service",
"REKONI_URL": "http://localhost:4004",
"MODEL_VERSION": "0.6.427"
"MODEL_VERSION": "0.6.431"
},
"runtimeVersion": "20",
"runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
@@ -359,7 +359,7 @@
"MONGO_URL": "mongodb://localhost:27017",
"TELEGRAM_DATABASE": "telegram-service",
"REKONI_URL": "http://localhost:4004",
"MODEL_VERSION": "0.6.427"
"MODEL_VERSION": "0.6.431"
},
"runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
"sourceMaps": true,

View File

@@ -1 +1 @@
"0.6.421"
"0.6.431"

View File

@@ -16,45 +16,18 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import accountPlugin, {
assignWorkspace,
confirmEmail,
getAccount,
getWorkspaceById,
updateArchiveInfo,
signUpByEmail,
createWorkspaceRecord,
updateWorkspaceInfo,
flattenStatus,
getAccountDB,
getWorkspaceInfoWithStatusById,
flattenStatus,
type WorkspaceInfoWithStatus,
type AccountDB,
type Workspace,
getEmailSocialId
signUpByEmail,
updateWorkspaceInfo,
type AccountDB
} from '@hcengineering/account'
import { backupWorkspace } from '@hcengineering/backup-service'
import { setMetadata } from '@hcengineering/platform'
import { createFileBackupStorage, createStorageBackupStorage, restore } from '@hcengineering/server-backup'
import serverClientPlugin, { getAccountClient } from '@hcengineering/server-client'
import {
backup,
backupFind,
backupList,
backupRemoveLast,
backupSize,
checkBackupIntegrity,
compactBackup,
createFileBackupStorage,
createStorageBackupStorage,
restore
} from '@hcengineering/server-backup'
import serverClientPlugin, {
BlobClient,
createClient,
getTransactorEndpoint,
getAccountClient
} from '@hcengineering/server-client'
import {
createBackupPipeline,
getConfig,
getWorkspaceDestroyAdapter,
registerAdapterFactory,
registerDestroyFactory,
registerServerPlugins,
@@ -62,45 +35,31 @@ import {
registerTxAdapterFactory,
sharedPipelineContextVars
} from '@hcengineering/server-pipeline'
import serverToken, { decodeToken, generateToken } from '@hcengineering/server-token'
import { buildModel, FileModelLogger } from '@hcengineering/server-tool'
import serverToken from '@hcengineering/server-token'
import { createWorkspace, upgradeWorkspace } from '@hcengineering/workspace-service'
import path from 'path'
import { buildStorageFromConfig, createStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
import { program, type Command } from 'commander'
import { addControlledDocumentRank } from './qms'
import { clearTelegramHistory } from './telegram'
import { diffWorkspace, updateField } from './workspace'
import { updateField } from './workspace'
import core, {
import {
AccountRole,
generateId,
isActiveMode,
isArchivingMode,
MeasureMetricsContext,
metricsToString,
RateLimiter,
versionToString,
type Data,
type Doc,
type Ref,
type Tx,
type Version
type Version,
type WorkspaceDataId
} from '@hcengineering/core'
import { consoleModelLogger, type MigrateOperation } from '@hcengineering/model'
import contact from '@hcengineering/model-contact'
import {
createMongoAdapter,
createMongoDestroyAdapter,
createMongoTxAdapter,
getMongoClient,
getWorkspaceMongoDB,
shutdownMongo
} from '@hcengineering/mongo'
import { backupDownload } from '@hcengineering/server-backup/src/backup'
import { createDatalakeClient, CONFIG_KIND as DATALAKE_CONFIG_KIND, type DatalakeConfig } from '@hcengineering/datalake'
import { getModelVersion } from '@hcengineering/model-all'
import {
createPostgreeDestroyAdapter,
@@ -108,45 +67,12 @@ import {
createPostgresTxAdapter,
shutdownPostgres
} from '@hcengineering/postgres'
import { CONFIG_KIND as S3_CONFIG_KIND, S3Service, type S3Config } from '@hcengineering/s3'
import type { PipelineFactory, StorageAdapter, StorageAdapterEx } from '@hcengineering/server-core'
import { deepEqual } from 'fast-equals'
import { createWriteStream, readFileSync } from 'fs'
import { getAccountDBUrl, getMongoDBUrl } from './__start'
import type { StorageAdapter } from '@hcengineering/server-core'
import { getAccountDBUrl } from './__start'
// import { fillGithubUsers, fixAccountEmails, renameAccount } from './account'
import {
benchmark,
benchmarkWorker,
generateWorkspaceData,
stressBenchmark,
testFindAll,
type StressBenchmarkMode
} from './benchmark'
import {
cleanArchivedSpaces,
cleanRemovedTransactions,
cleanWorkspace,
fixCommentDoubleIdCreate,
fixMinioBW,
fixSkills,
optimizeModel,
removeDuplicateIds,
restoreHrTaskTypesFromUpdates,
restoreRecruitingTaskTypes
} from './clean'
import { changeConfiguration } from './configuration'
import {
generateUuidMissingWorkspaces,
moveAccountDbFromMongoToPG,
moveFromMongoToPG,
moveWorkspaceFromMongoToPG,
updateDataWorkspaceIdToUuid
} from './db'
import { reindexWorkspace } from './fulltext'
import { restoreControlledDocContentMongo, restoreMarkupRefsMongo, restoreWikiContentMongo } from './markup'
import { fixMixinForeignAttributes, showMixinForeignAttributes } from './mixin'
import { copyToDatalake, moveFiles, showLostFiles } from './storage'
import { getToolToken, getWorkspace, getWorkspaceTransactorEndpoint } from './utils'
const colorConstants = {
@@ -1212,20 +1138,25 @@ export function devTool (
// await storageAdapter.close()
// })
// program
// .command('backup-s3-download <bucketName> <dirName> <storeIn>')
// .description('Download a full backup from s3 to local dir')
// .action(async (bucketName: string, dirName: string, storeIn: string, cmd) => {
// const backupStorageConfig = storageConfigFromEnv(process.env.STORAGE)
// const storageAdapter = createStorageFromConfig(backupStorageConfig.storages[0])
// try {
// const storage = await createStorageBackupStorage(toolCtx, storageAdapter, getWorkspaceId(bucketName), dirName)
// await backupDownload(storage, storeIn)
// } catch (err: any) {
// toolCtx.error('failed to size backup', { err })
// }
// await storageAdapter.close()
// })
program
.command('backup-s3-download <bucketName> <dirName> <storeIn>')
.description('Download a full backup from s3 to local dir')
.action(async (bucketName: string, dirName: string, storeIn: string, cmd) => {
const backupStorageConfig = storageConfigFromEnv(process.env.STORAGE)
const storageAdapter = createStorageFromConfig(backupStorageConfig.storages[0])
try {
const storage = await createStorageBackupStorage(
toolCtx,
storageAdapter,
bucketName as WorkspaceDataId,
dirName
)
await backupDownload(storage, storeIn)
} catch (err: any) {
toolCtx.error('failed to size backup', { err })
}
await storageAdapter.close()
})
// program
// .command('copy-s3-datalake')

View File

@@ -665,7 +665,7 @@ export const coreOperation: MigrateOperation = {
func: migrateCollaborativeContentToStorage
},
{
state: 'fix-backups-hash-timestamp',
state: 'fix-backups-hash-timestamp-v2',
func: async (client: MigrationClient): Promise<void> => {
const now = Date.now().toString(16)
for (const d of client.hierarchy.domains()) {

View File

@@ -1928,7 +1928,7 @@ class PostgresAdapter extends PostgresAdapterBase {
for (const tx of txes) {
const fields: string[] = ['modifiedBy', 'modifiedOn', '%hash%']
const updates: string[] = ['"modifiedBy" = $2', '"modifiedOn" = $3', '"%hash%" = $4']
const params: any[] = [tx.modifiedBy, tx.modifiedOn, null]
const params: any[] = [tx.modifiedBy, tx.modifiedOn, this.curHash()]
let paramsIndex = params.length
const { extractedFields, remainingData } = parseUpdate(tx.operations, schemaFields)
const { space, attachedTo, ...ops } = tx.operations as any