Mirror of https://github.com/hcengineering/platform.git (synced 2025-05-04 14:28:15 +00:00)
UBERF-9224: Use context variables to hold context data (#7754)
Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Parent: 9046204e0a
Commit: f8a8c94bc2
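The commit threads a `contextVars: Record<string, any>` map through pipelines, adapter factories, and shutdown helpers, so connection caches and process-exit hooks live in a caller-owned context instead of module-level singletons. A minimal TypeScript sketch of the pattern, assuming a hypothetical `expensiveConnect` helper and `clientCache` key (the real adapters in the diff below use keys such as `pgConnections` and `cntInfoPG`):

// Sketch only: shared state keyed in a caller-owned map, not module scope.
interface Client {
  close: () => void
}
declare function expensiveConnect (url: string): Client // hypothetical helper

function getClientCached (contextVars: Record<string, any>, url: string): Client {
  // Reuse the cache stored in this context, creating it on first use.
  const cache: Map<string, Client> = contextVars.clientCache ?? new Map()
  contextVars.clientCache = cache
  let client = cache.get(url)
  if (client === undefined) {
    client = expensiveConnect(url)
    cache.set(url, client)
  }
  return client
}

Two pipelines that share one contextVars object share one cache; passing separate objects keeps their connections, and their shutdown, independent.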
@@ -28,6 +28,7 @@ import {
 } from '@hcengineering/postgres'
 import { type DBDoc } from '@hcengineering/postgres/types/utils'
 import { getTransactorEndpoint } from '@hcengineering/server-client'
+import { sharedPipelineContextVars } from '@hcengineering/server-pipeline'
 import { generateToken } from '@hcengineering/server-token'
 import { connect } from '@hcengineering/server-tool'
 import { type MongoClient, UUID } from 'mongodb'

@@ -45,7 +46,7 @@ export async function moveFromMongoToPG (
   }
   const client = getMongoClient(mongoUrl)
   const mongo = await client.getClient()
-  const pg = getDBClient(dbUrl)
+  const pg = getDBClient(sharedPipelineContextVars, dbUrl)
   const pgClient = await pg.getClient()
 
   for (let index = 0; index < workspaces.length; index++) {

@@ -168,7 +169,7 @@ export async function moveWorkspaceFromMongoToPG (
   }
   const client = getMongoClient(mongoUrl)
   const mongo = await client.getClient()
-  const pg = getDBClient(dbUrl)
+  const pg = getDBClient(sharedPipelineContextVars, dbUrl)
   const pgClient = await pg.getClient()
 
   await moveWorkspace(accountDb, mongo, pgClient, ws, region, include, force)

@@ -306,7 +307,7 @@ export async function updateDataWorkspaceIdToUuid (
     throw new Error('dbUrl is required')
   }
 
-  const pg = getDBClient(dbUrl)
+  const pg = getDBClient(sharedPipelineContextVars, dbUrl)
  try {
    const pgClient = await pg.getClient()
 
@@ -66,10 +66,11 @@ import {
   registerDestroyFactory,
   registerServerPlugins,
   registerStringLoaders,
-  registerTxAdapterFactory
+  registerTxAdapterFactory,
+  sharedPipelineContextVars
 } from '@hcengineering/server-pipeline'
 import serverToken, { decodeToken, generateToken } from '@hcengineering/server-token'
-import { FileModelLogger, buildModel } from '@hcengineering/server-tool'
+import { buildModel, FileModelLogger } from '@hcengineering/server-tool'
 import { createWorkspace, upgradeWorkspace } from '@hcengineering/workspace-service'
 import path from 'path'
 

@@ -105,12 +106,18 @@ import {
   createMongoTxAdapter,
   getMongoClient,
   getWorkspaceMongoDB,
-  shutdown
+  shutdownMongo
 } from '@hcengineering/mongo'
 import { backupDownload } from '@hcengineering/server-backup/src/backup'
 
 import { createDatalakeClient, CONFIG_KIND as DATALAKE_CONFIG_KIND, type DatalakeConfig } from '@hcengineering/datalake'
 import { getModelVersion } from '@hcengineering/model-all'
+import {
+  createPostgreeDestroyAdapter,
+  createPostgresAdapter,
+  createPostgresTxAdapter,
+  shutdownPostgres
+} from '@hcengineering/postgres'
 import { CONFIG_KIND as S3_CONFIG_KIND, S3Service, type S3Config } from '@hcengineering/s3'
 import type { PipelineFactory, StorageAdapter, StorageAdapterEx } from '@hcengineering/server-core'
 import { deepEqual } from 'fast-equals'

@@ -139,17 +146,16 @@ import {
 import { changeConfiguration } from './configuration'
 import {
   generateUuidMissingWorkspaces,
-  updateDataWorkspaceIdToUuid,
   moveAccountDbFromMongoToPG,
   moveFromMongoToPG,
-  moveWorkspaceFromMongoToPG
+  moveWorkspaceFromMongoToPG,
+  updateDataWorkspaceIdToUuid
 } from './db'
-import { restoreControlledDocContentMongo, restoreWikiContentMongo, restoreMarkupRefsMongo } from './markup'
+import { reindexWorkspace } from './fulltext'
+import { restoreControlledDocContentMongo, restoreMarkupRefsMongo, restoreWikiContentMongo } from './markup'
 import { fixMixinForeignAttributes, showMixinForeignAttributes } from './mixin'
 import { fixAccountEmails, renameAccount, fillGithubUsers } from './account'
 import { copyToDatalake, moveFiles, showLostFiles } from './storage'
-import { createPostgresTxAdapter, createPostgresAdapter, createPostgreeDestroyAdapter } from '@hcengineering/postgres'
-import { reindexWorkspace } from './fulltext'
 
 const colorConstants = {
   colorRed: '\u001b[31m',

@@ -163,6 +169,16 @@ const colorConstants = {
   reset: '\u001b[0m'
 }
 
+// Register close on process exit.
+process.on('exit', () => {
+  shutdownPostgres(sharedPipelineContextVars).catch((err) => {
+    console.error(err)
+  })
+  shutdownMongo(sharedPipelineContextVars).catch((err) => {
+    console.error(err)
+  })
+})
+
 /**
  * @public
  */

@@ -220,7 +236,7 @@ export function devTool (
     }
     closeAccountsDb()
     console.log(`closing database connection to '${uri}'...`)
-    await shutdown()
+    await shutdownMongo()
   }
 
   async function withStorage (f: (storageAdapter: StorageAdapter) => Promise<any>): Promise<void> {

@@ -648,6 +664,7 @@ export function devTool (
           true,
           true,
           5000, // 5 gigabytes per blob
+          sharedPipelineContextVars,
          async (storage, workspaceStorage) => {
            if (cmd.remove) {
              await updateArchiveInfo(toolCtx, db, ws.workspace, true)

@@ -667,7 +684,7 @@ export function devTool (
 
        const destroyer = getWorkspaceDestroyAdapter(dbUrl)
 
-        await destroyer.deleteWorkspace(toolCtx, { name: ws.workspace })
+        await destroyer.deleteWorkspace(toolCtx, sharedPipelineContextVars, { name: ws.workspace })
      }
    }
  )

@@ -718,7 +735,8 @@ export function devTool (
            cmd.region,
            false,
            false,
-            100
+            100,
+            sharedPipelineContextVars
          )
        ) {
          processed++
@@ -412,7 +412,7 @@ export class ModelDb extends MemDb {
           this.updateDoc(cud.objectId, doc, cud)
           TxProcessor.updateDoc2Doc(doc, cud)
         } else {
-          ctx.error('no document found, failed to apply model transaction, skipping', {
+          ctx.warn('no document found, failed to apply model transaction, skipping', {
             _id: tx._id,
             _class: tx._class,
             objectId: cud.objectId

@@ -424,7 +424,7 @@ export class ModelDb extends MemDb {
         try {
           this.delDoc((tx as TxRemoveDoc<Doc>).objectId)
         } catch (err: any) {
-          ctx.error('no document found, failed to apply model transaction, skipping', {
+          ctx.warn('no document found, failed to apply model transaction, skipping', {
             _id: tx._id,
             _class: tx._class,
             objectId: (tx as TxRemoveDoc<Doc>).objectId

@@ -438,7 +438,7 @@ export class ModelDb extends MemDb {
           this.updateDoc(mix.objectId, doc, mix)
           TxProcessor.updateMixin4Doc(doc, mix)
         } else {
-          ctx.error('no document found, failed to apply model transaction, skipping', {
+          ctx.warn('no document found, failed to apply model transaction, skipping', {
             _id: tx._id,
             _class: tx._class,
             objectId: mix.objectId
@@ -23,20 +23,37 @@ import {
   getConfig,
   registerAdapterFactory,
   registerDestroyFactory,
-  registerTxAdapterFactory
+  registerTxAdapterFactory,
+  sharedPipelineContextVars
 } from '@hcengineering/server-pipeline'
 import { join } from 'path'
 
-import { createMongoAdapter, createMongoDestroyAdapter, createMongoTxAdapter } from '@hcengineering/mongo'
+import {
+  createMongoAdapter,
+  createMongoDestroyAdapter,
+  createMongoTxAdapter,
+  shutdownMongo
+} from '@hcengineering/mongo'
 import {
   createPostgreeDestroyAdapter,
   createPostgresAdapter,
   createPostgresTxAdapter,
-  setDBExtraOptions
+  setDBExtraOptions,
+  shutdownPostgres
 } from '@hcengineering/postgres'
 import { readFileSync } from 'node:fs'
 const model = JSON.parse(readFileSync(process.env.MODEL_JSON ?? 'model.json').toString()) as Tx[]
 
+// Register close on process exit.
+process.on('exit', () => {
+  shutdownPostgres(sharedPipelineContextVars).catch((err) => {
+    console.error(err)
+  })
+  shutdownMongo(sharedPipelineContextVars).catch((err) => {
+    console.error(err)
+  })
+})
+
 const metricsContext = initStatisticsContext('backup', {
   factory: () =>
     new MeasureMetricsContext(

@@ -84,5 +101,6 @@ startBackup(
       externalStorage,
       disableTriggers: true
     })
-  }
+  },
+  sharedPipelineContextVars
 )
@@ -30,13 +30,19 @@ import {
   LowLevelMiddleware,
   ModelMiddleware
 } from '@hcengineering/middleware'
-import { createMongoAdapter, createMongoDestroyAdapter, createMongoTxAdapter } from '@hcengineering/mongo'
+import {
+  createMongoAdapter,
+  createMongoDestroyAdapter,
+  createMongoTxAdapter,
+  shutdownMongo
+} from '@hcengineering/mongo'
 import { PlatformError, setMetadata, unknownError } from '@hcengineering/platform'
 import {
   createPostgreeDestroyAdapter,
   createPostgresAdapter,
   createPostgresTxAdapter,
-  setDBExtraOptions
+  setDBExtraOptions,
+  shutdownPostgres
 } from '@hcengineering/postgres'
 import serverClientPlugin, { getTransactorEndpoint, getWorkspaceInfo } from '@hcengineering/server-client'
 import serverCore, {

@@ -56,7 +62,8 @@ import {
   registerDestroyFactory,
   registerServerPlugins,
   registerStringLoaders,
-  registerTxAdapterFactory
+  registerTxAdapterFactory,
+  sharedPipelineContextVars
 } from '@hcengineering/server-pipeline'
 import serverToken, { decodeToken, generateToken, type Token } from '@hcengineering/server-token'
 import cors from '@koa/cors'

@@ -104,7 +111,8 @@ class WorkspaceIndexer {
       branding: null,
       modelDb,
       hierarchy,
-      storageAdapter: externalStorage
+      storageAdapter: externalStorage,
+      contextVars: {}
     }
     result.pipeline = await createPipeline(ctx, middlewares, context)
 

@@ -204,6 +212,15 @@ interface Search {
 interface Reindex {
   token: string
 }
+// Register close on process exit.
+process.on('exit', () => {
+  shutdownPostgres(sharedPipelineContextVars).catch((err) => {
+    console.error(err)
+  })
+  shutdownMongo(sharedPipelineContextVars).catch((err) => {
+    console.error(err)
+  })
+})
 
 export async function startIndexer (
   ctx: MeasureContext,

@@ -33,21 +33,37 @@ import {
   registerDestroyFactory,
   registerServerPlugins,
   registerStringLoaders,
-  registerTxAdapterFactory
+  registerTxAdapterFactory,
+  sharedPipelineContextVars
 } from '@hcengineering/server-pipeline'
 
-import { createMongoAdapter, createMongoDestroyAdapter, createMongoTxAdapter } from '@hcengineering/mongo'
+import {
+  createMongoAdapter,
+  createMongoDestroyAdapter,
+  createMongoTxAdapter,
+  shutdownMongo
+} from '@hcengineering/mongo'
 import {
   createPostgreeDestroyAdapter,
   createPostgresAdapter,
   createPostgresTxAdapter,
-  setDBExtraOptions
+  setDBExtraOptions,
+  shutdownPostgres
 } from '@hcengineering/postgres'
 import { readFileSync } from 'node:fs'
 const model = JSON.parse(readFileSync(process.env.MODEL_JSON ?? 'model.json').toString()) as Tx[]
 
 registerStringLoaders()
 
+// Register close on process exit.
+process.on('exit', () => {
+  shutdownPostgres(sharedPipelineContextVars).catch((err) => {
+    console.error(err)
+  })
+  shutdownMongo(sharedPipelineContextVars).catch((err) => {
+    console.error(err)
+  })
+})
 /**
  * @public
 */
@@ -92,7 +92,7 @@ import {
   toAccountInfo,
   verifyPassword
 } from './utils'
-import { getWorkspaceDestroyAdapter } from '@hcengineering/server-pipeline'
+import { getWorkspaceDestroyAdapter, sharedPipelineContextVars } from '@hcengineering/server-pipeline'
 
 import MD5 from 'crypto-js/md5'
 function buildGravatarId (email: string): string {

@@ -2364,7 +2364,7 @@ export async function dropWorkspaceFull (
   const ws = await dropWorkspace(ctx, db, branding, workspaceId)
 
   const adapter = getWorkspaceDestroyAdapter(dbUrl)
-  await adapter.deleteWorkspace(ctx, { name: ws.workspace })
+  await adapter.deleteWorkspace(ctx, sharedPipelineContextVars, { name: ws.workspace })
 
   const wspace = getWorkspaceId(workspaceId)
   const hasBucket = await storageAdapter?.exists(ctx, wspace)

@@ -23,6 +23,7 @@ import { MongoAccountDB } from './collections/mongo'
 import { PostgresAccountDB } from './collections/postgres'
 import { accountPlugin } from './plugin'
 import type { Account, AccountDB, AccountInfo, RegionInfo, WorkspaceInfo } from './types'
+import { sharedPipelineContextVars } from '@hcengineering/server-pipeline'
 
 export async function getAccountDB (uri: string, dbNs?: string): Promise<[AccountDB, () => void]> {
   const isMongo = uri.startsWith('mongodb://')

@@ -41,7 +42,7 @@ export async function getAccountDB (uri: string, dbNs?: string): Promise<[Accoun
       }
     ]
   } else {
-    const client = getDBClient(uri)
+    const client = getDBClient(sharedPipelineContextVars, uri)
     const pgClient = await client.getClient()
     // TODO: if dbNs is provided put tables in that schema
     const pgAccount = new PostgresAccountDB(pgClient)
@@ -37,7 +37,8 @@ export function startBackup (
     workspace: WorkspaceIdWithUrl,
     branding: Branding | null,
     externalStorage: StorageAdapter
-  ) => DbConfiguration
+  ) => DbConfiguration,
+  contextVars: Record<string, any>
 ): void {
   const config = _config()
   setMetadata(serverToken.metadata.Secret, config.Secret)

@@ -66,7 +67,8 @@ export function startBackup (
     (ctx, workspace, branding, externalStorage) => {
       return getConfig(ctx, mainDbUrl, workspace, branding, externalStorage)
     },
-    config.Region
+    config.Region,
+    contextVars
   )
 
   process.on('SIGINT', shutdown)

@@ -94,6 +96,7 @@ export async function backupWorkspace (
   freshBackup: boolean = false,
   clean: boolean = false,
   downloadLimit: number,
+  contextVars: Record<string, any>,
 
   onFinish?: (backupStorage: StorageAdapter, workspaceStorage: StorageAdapter) => Promise<void>
 ): Promise<boolean> {

@@ -130,7 +133,8 @@ export async function backupWorkspace (
     freshBackup,
     clean,
     downloadLimit,
-    []
+    [],
+    contextVars
   )
   if (result && onFinish !== undefined) {
     await onFinish(storageAdapter, workspaceStorageAdapter)

@@ -66,6 +66,7 @@ class BackupWorker {
       externalStorage: StorageAdapter
     ) => DbConfiguration,
     readonly region: string,
+    readonly contextVars: Record<string, any>,
     readonly freshWorkspace: boolean = false,
     readonly clean: boolean = false,
     readonly skipDomains: string[] = []

@@ -211,6 +212,7 @@ class BackupWorker {
     const modelDb = new ModelDb(hierarchy)
     const txAdapter = await adapterConf.factory(
       ctx,
+      this.contextVars,
       hierarchy,
       adapterConf.url,
       wsUrl,

@@ -218,7 +220,7 @@ class BackupWorker {
       this.workspaceStorageAdapter
     )
     try {
-      await txAdapter.init?.(ctx)
+      await txAdapter.init?.(ctx, this.contextVars)
 
       return (
         await txAdapter.rawFindAll<Tx>(

@@ -292,9 +294,18 @@ export function backupService (
     externalStorage: StorageAdapter
   ) => DbConfiguration,
   region: string,
+  contextVars: Record<string, any>,
   recheck?: boolean
 ): () => void {
-  const backupWorker = new BackupWorker(storage, config, pipelineFactory, workspaceStorageAdapter, getConfig, region)
+  const backupWorker = new BackupWorker(
+    storage,
+    config,
+    pipelineFactory,
+    workspaceStorageAdapter,
+    getConfig,
+    region,
+    contextVars
+  )
 
   const shutdown = (): void => {
     void backupWorker.close()

@@ -322,6 +333,7 @@ export async function doBackupWorkspace (
   clean: boolean,
   downloadLimit: number,
   skipDomains: string[],
+  contextVars: Record<string, any>,
   notify?: (progress: number) => Promise<void>
 ): Promise<boolean> {
   const backupWorker = new BackupWorker(

@@ -331,6 +343,7 @@ export async function doBackupWorkspace (
     workspaceStorageAdapter,
     getConfig,
     region,
+    contextVars,
     freshWorkspace,
     clean,
     skipDomains
@@ -65,7 +65,12 @@ export type DbAdapterHandler = (
  * @public
  */
 export interface DbAdapter extends LowLevelStorage {
-  init?: (ctx: MeasureContext, domains?: string[], excludeDomains?: string[]) => Promise<void>
+  init?: (
+    ctx: MeasureContext,
+    contextVars: Record<string, any>,
+    domains?: string[],
+    excludeDomains?: string[]
+  ) => Promise<void>
 
   helper?: () => DomainHelperOperations
 

@@ -103,7 +108,7 @@ export interface TxAdapter extends DbAdapter {
  * @public
  */
 export interface WorkspaceDestroyAdapter {
-  deleteWorkspace: (ctx: MeasureContext, workspace: WorkspaceId) => Promise<void>
+  deleteWorkspace: (ctx: MeasureContext, contextVars: Record<string, any>, workspace: WorkspaceId) => Promise<void>
 }
 
 /**

@@ -111,6 +116,7 @@ export interface WorkspaceDestroyAdapter {
  */
 export type DbAdapterFactory = (
   ctx: MeasureContext,
+  contextVars: Record<string, any>,
   hierarchy: Hierarchy,
   url: string,
   workspaceId: WorkspaceId,
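The signature changes above are the contract the rest of the commit implements: every `DbAdapterFactory`, `DbAdapter.init`, and `WorkspaceDestroyAdapter.deleteWorkspace` now receives the pipeline's `contextVars`. A sketch of a factory conforming to the new shape; `buildMyAdapter` and the `myAdapterCache` key are hypothetical, only the parameter list comes from the diff:

import type { Hierarchy, MeasureContext, ModelDb, WorkspaceId } from '@hcengineering/core'
import type { DbAdapter } from '@hcengineering/server-core'

// Hypothetical constructor standing in for a real adapter implementation.
declare function buildMyAdapter (
  cache: Map<string, unknown>,
  hierarchy: Hierarchy,
  url: string,
  workspaceId: WorkspaceId,
  modelDb: ModelDb
): Promise<DbAdapter>

export async function createMyAdapter (
  ctx: MeasureContext,
  contextVars: Record<string, any>,
  hierarchy: Hierarchy,
  url: string,
  workspaceId: WorkspaceId,
  modelDb: ModelDb
): Promise<DbAdapter> {
  // Shared state is scoped to the pipeline's contextVars, not the module.
  const cache: Map<string, unknown> = contextVars.myAdapterCache ?? new Map()
  contextVars.myAdapterCache = cache
  return await buildMyAdapter(cache, hierarchy, url, workspaceId, modelDb)
}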
@@ -119,6 +119,7 @@ class BenchmarkDbAdapter extends DummyDbAdapter {
  */
 export async function createBenchmarkAdapter (
   ctx: MeasureContext,
+  contextVars: Record<string, any>,
   hierarchy: Hierarchy,
   url: string,
   workspaceId: WorkspaceId,

@@ -130,7 +130,9 @@ export class DbAdapterManagerImpl implements DBAdapterManager {
           }
         }
       }
-      await ctx.with(`init adapter ${key}`, {}, (ctx) => adapter?.init?.(ctx, domains, excludeDomains))
+      await ctx.with(`init adapter ${key}`, {}, (ctx) =>
+        adapter?.init?.(ctx, this.context.contextVars, domains, excludeDomains)
+      )
     }
   }
 }

@@ -177,6 +177,7 @@ class InMemoryAdapter extends DummyDbAdapter implements DbAdapter {
  */
 export async function createInMemoryAdapter (
   ctx: MeasureContext,
+  contextVars: Record<string, any>,
   hierarchy: Hierarchy,
   url: string,
   workspaceId: WorkspaceId

@@ -22,6 +22,7 @@ import { DummyDbAdapter } from './mem'
  */
 export async function createNullAdapter (
   ctx: MeasureContext,
+  contextVars: Record<string, any>,
   hierarchy: Hierarchy,
   url: string,
   workspaceId: WorkspaceId,

@@ -181,6 +181,8 @@ export interface PipelineContext {
   derived?: Middleware
   head?: Middleware
 
+  contextVars: Record<string, any>
+
   broadcastEvent?: (ctx: MeasureContext, tx: Tx[]) => Promise<void>
 }
 /**
@@ -55,6 +55,7 @@ export class DBAdapterMiddleware extends BaseMiddleware implements Middleware {
         key,
         await adapterConf.factory(
           ctx,
+          this.context.contextVars,
           this.context.hierarchy,
           adapterConf.url,
           this.context.workspace,

@@ -69,7 +70,7 @@ export class DBAdapterMiddleware extends BaseMiddleware implements Middleware {
 
     const txAdapterName = this.conf.domains[DOMAIN_TX]
     const txAdapter = adapters.get(txAdapterName) as TxAdapter
-    await txAdapter.init?.(metrics, [DOMAIN_TX, DOMAIN_MODEL_TX])
+    await txAdapter.init?.(metrics, this.context.contextVars, [DOMAIN_TX, DOMAIN_MODEL_TX])
 
     const defaultAdapter = adapters.get(this.conf.defaultAdapter)
     if (defaultAdapter === undefined) {

@@ -37,7 +37,6 @@ import core, {
   type SessionData,
   type Tx
 } from '@hcengineering/core'
-import { PlatformError, unknownError } from '@hcengineering/platform'
 import type {
   IndexedDoc,
   Middleware,

@@ -91,9 +90,6 @@ export class FullTextMiddleware extends BaseMiddleware implements Middleware {
   }
 
   async init (ctx: MeasureContext): Promise<void> {
-    if (this.context.adapterManager == null) {
-      throw new PlatformError(unknownError('Adapter manager should be specified'))
-    }
     this.contexts = new Map(
       this.context.modelDb.findAllSync(core.class.FullTextSearchContext, {}).map((it) => [it.toClass, it])
     )
@@ -28,7 +28,7 @@ import core, {
 } from '@hcengineering/core'
 import { type DbAdapter, wrapAdapterToClient } from '@hcengineering/server-core'
 import { createMongoAdapter, createMongoTxAdapter } from '..'
-import { getMongoClient, type MongoClientReference, shutdown } from '../utils'
+import { getMongoClient, type MongoClientReference, shutdownMongo } from '../utils'
 import { genMinModel } from './minmodel'
 import { createTaskModel, type Task, type TaskComment, taskPlugin } from './tasks'
 

@@ -52,7 +52,7 @@ describe('mongo operations', () => {
 
   afterAll(async () => {
     mongoClient.close()
-    await shutdown()
+    await shutdownMongo()
   })
 
   beforeEach(async () => {

@@ -80,6 +80,7 @@ describe('mongo operations', () => {
     const mctx = new MeasureMetricsContext('', {})
     const txStorage = await createMongoTxAdapter(
       new MeasureMetricsContext('', {}),
+      {},
       hierarchy,
       mongodbUri,
       getWorkspaceId(dbId),

@@ -88,6 +89,7 @@ describe('mongo operations', () => {
 
     serverStorage = await createMongoAdapter(
       new MeasureMetricsContext('', {}),
+      {},
       hierarchy,
       mongodbUri,
       getWorkspaceId(dbId),

@@ -22,7 +22,7 @@ export * from './utils'
 
 export function createMongoDestroyAdapter (url: string): WorkspaceDestroyAdapter {
   return {
-    deleteWorkspace: async (ctx, workspace): Promise<void> => {
+    deleteWorkspace: async (ctx, contextVars, workspace): Promise<void> => {
       const client = getMongoClient(url)
       try {
         await ctx.with('delete-workspace', {}, async () => {

@@ -1807,6 +1807,7 @@ function translateLikeQuery (pattern: string): { $regex: string, $options: strin
  */
 export async function createMongoAdapter (
   ctx: MeasureContext,
+  contextVars: Record<string, any>,
   hierarchy: Hierarchy,
   url: string,
   workspaceId: WorkspaceId,

@@ -1825,6 +1826,7 @@ export async function createMongoAdapter (
  */
 export async function createMongoTxAdapter (
   ctx: MeasureContext,
+  contextVars: Record<string, any>,
   hierarchy: Hierarchy,
   url: string,
   workspaceId: WorkspaceId,
@@ -27,19 +27,12 @@ import { MongoClient, type Collection, type Db, type Document } from 'mongodb'
 
 const connections = new Map<string, MongoClientReferenceImpl>()
 
-// Register mongo close on process exit.
-process.on('exit', () => {
-  shutdown().catch((err) => {
-    console.error(err)
-  })
-})
-
 const clientRefs = new Map<string, ClientRef>()
 
 /**
  * @public
 */
-export async function shutdown (): Promise<void> {
+export async function shutdownMongo (contextVars: Record<string, any> = {}): Promise<void> {
   for (const it of Array.from(clientRefs.values())) {
     console.error((it as any).stack)
   }
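Note that the renamed `shutdownMongo` defaults its new parameter to `{}`, so call sites that pass nothing (the dev tool and the tests above call plain `shutdownMongo()`) keep working, while process-exit hooks can hand in the shared context. Both forms, for illustration:

// Both calls are valid after this change (sketch; error handling omitted):
await shutdownMongo() // legacy-style call sites, e.g. tests
await shutdownMongo(sharedPipelineContextVars) // process-exit hooks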
@@ -27,7 +27,7 @@ import core, {
 } from '@hcengineering/core'
 import { type DbAdapter, wrapAdapterToClient } from '@hcengineering/server-core'
 import { createPostgresAdapter, createPostgresTxAdapter } from '..'
-import { getDBClient, type PostgresClientReference, shutdown } from '../utils'
+import { getDBClient, type PostgresClientReference, shutdownPostgres } from '../utils'
 import { genMinModel } from './minmodel'
 import { createTaskModel, type Task, type TaskComment, taskPlugin } from './tasks'
 

@@ -35,12 +35,14 @@ const txes = genMinModel()
 
 createTaskModel(txes)
 
+const contextVars: Record<string, any> = {}
+
 describe('postgres operations', () => {
   const baseDbUri: string = process.env.DB_URL ?? 'postgresql://postgres:example@localhost:5433'
   let dbId: string = 'pg_testdb_' + generateId()
   let dbUuid: string = crypto.randomUUID()
   let dbUri: string = baseDbUri + '/' + dbId
-  const clientRef: PostgresClientReference = getDBClient(baseDbUri)
+  const clientRef: PostgresClientReference = getDBClient(contextVars, baseDbUri)
   let hierarchy: Hierarchy
   let model: ModelDb
   let client: Client

@@ -49,7 +51,7 @@ describe('postgres operations', () => {
 
   afterAll(async () => {
     clientRef.close()
-    await shutdown()
+    await shutdownPostgres(contextVars)
   })
 
   beforeEach(async () => {

@@ -88,6 +90,7 @@ describe('postgres operations', () => {
     const mctx = new MeasureMetricsContext('', {})
     const txStorage = await createPostgresTxAdapter(
       mctx,
+      contextVars,
       hierarchy,
       dbUri,
       {

@@ -107,6 +110,7 @@ describe('postgres operations', () => {
     const ctx = new MeasureMetricsContext('client', {})
     const serverStorage = await createPostgresAdapter(
       ctx,
+      contextVars,
       hierarchy,
       dbUri,
       {

@@ -115,7 +119,7 @@ describe('postgres operations', () => {
       },
       model
     )
-    await serverStorage.init?.(ctx)
+    await serverStorage.init?.(ctx, contextVars)
     client = await createClient(async (handler) => {
       return wrapAdapterToClient(ctx, serverStorage, txes)
     })
@@ -19,12 +19,20 @@ import { getDBClient, retryTxn } from './utils'
 
 export { getDocFieldsByDomains, translateDomain } from './schemas'
 export * from './storage'
-export { convertDoc, createTables, getDBClient, retryTxn, setDBExtraOptions, setExtraOptions } from './utils'
+export {
+  convertDoc,
+  createTables,
+  getDBClient,
+  retryTxn,
+  setDBExtraOptions,
+  setExtraOptions,
+  shutdownPostgres
+} from './utils'
 
 export function createPostgreeDestroyAdapter (url: string): WorkspaceDestroyAdapter {
   return {
-    deleteWorkspace: async (ctx, workspace): Promise<void> => {
-      const client = getDBClient(url)
+    deleteWorkspace: async (ctx, contextVars, workspace): Promise<void> => {
+      const client = getDBClient(contextVars, url)
       try {
         const connection = await client.getClient()
 
@@ -93,7 +93,6 @@ import {
   parseUpdate,
   type PostgresClientReference
 } from './utils'
-
 async function * createCursorGenerator (
   client: postgres.ReservedSql,
   sql: string,

@@ -192,11 +191,10 @@ class ConnectionInfo {
   }
 }
 
-const connections = new Map<string, ConnectionInfo>()
-
 class ConnectionMgr {
   constructor (
     protected readonly client: postgres.Sql,
+    protected readonly connections: () => Map<string, ConnectionInfo>,
     readonly mgrId: string
   ) {}
 

@@ -296,10 +294,10 @@ class ConnectionMgr {
   }
 
   release (id: string): void {
-    const conn = connections.get(id)
+    const conn = this.connections().get(id)
     if (conn !== undefined) {
       conn.released = true
-      connections.delete(id) // We need to delete first
+      this.connections().delete(id) // We need to delete first
       conn.release()
     } else {
       console.log('wrne')

@@ -307,10 +305,11 @@ class ConnectionMgr {
   }
 
   close (): void {
-    for (const [k, conn] of Array.from(connections.entries()).filter(
+    const cnts = this.connections()
+    for (const [k, conn] of Array.from(cnts.entries()).filter(
       ([, it]: [string, ConnectionInfo]) => it.mgrId === this.mgrId
     )) {
-      connections.delete(k)
+      cnts.delete(k)
       try {
         conn.release()
       } catch (err: any) {

@@ -320,12 +319,12 @@ class ConnectionMgr {
   }
 
   getConnection (id: string, managed: boolean = true): ConnectionInfo {
-    let conn = connections.get(id)
+    let conn = this.connections().get(id)
     if (conn === undefined) {
       conn = new ConnectionInfo(this.mgrId, id, this.client, managed)
     }
     if (managed) {
-      connections.set(id, conn)
+      this.connections().set(id, conn)
     }
     return conn
   }
|
|||||||
protected readonly tableFields = new Map<string, string[]>()
|
protected readonly tableFields = new Map<string, string[]>()
|
||||||
protected readonly workspaceId: WorkspaceId
|
protected readonly workspaceId: WorkspaceId
|
||||||
|
|
||||||
|
protected connections = new Map<string, ConnectionInfo>()
|
||||||
|
|
||||||
mgr: ConnectionMgr
|
mgr: ConnectionMgr
|
||||||
|
|
||||||
constructor (
|
constructor (
|
||||||
@ -422,7 +423,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
|
|||||||
name: enrichedWorkspaceId.uuid ?? enrichedWorkspaceId.name
|
name: enrichedWorkspaceId.uuid ?? enrichedWorkspaceId.name
|
||||||
}
|
}
|
||||||
this._helper = new DBCollectionHelper(this.client, this.workspaceId)
|
this._helper = new DBCollectionHelper(this.client, this.workspaceId)
|
||||||
this.mgr = new ConnectionMgr(client, mgrId)
|
this.mgr = new ConnectionMgr(client, () => this.connections, mgrId)
|
||||||
}
|
}
|
||||||
|
|
||||||
reserveContext (id: string): () => void {
|
reserveContext (id: string): () => void {
|
||||||
@ -430,7 +431,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
|
|||||||
return () => {
|
return () => {
|
||||||
conn.released = true
|
conn.released = true
|
||||||
conn.release()
|
conn.release()
|
||||||
connections.delete(id) // We need to delete first
|
this.connections.delete(id) // We need to delete first
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -477,7 +478,12 @@ abstract class PostgresAdapterBase implements DbAdapter {
|
|||||||
|
|
||||||
on?: ((handler: DbAdapterHandler) => void) | undefined
|
on?: ((handler: DbAdapterHandler) => void) | undefined
|
||||||
|
|
||||||
abstract init (ctx: MeasureContext, domains?: string[], excludeDomains?: string[]): Promise<void>
|
abstract init (
|
||||||
|
ctx: MeasureContext,
|
||||||
|
contextVars: Record<string, any>,
|
||||||
|
domains?: string[],
|
||||||
|
excludeDomains?: string[]
|
||||||
|
): Promise<void>
|
||||||
|
|
||||||
async close (): Promise<void> {
|
async close (): Promise<void> {
|
||||||
this.mgr.close()
|
this.mgr.close()
|
||||||
@ -1672,7 +1678,14 @@ interface OperationBulk {
|
|||||||
const initRateLimit = new RateLimiter(1)
|
const initRateLimit = new RateLimiter(1)
|
||||||
|
|
||||||
class PostgresAdapter extends PostgresAdapterBase {
|
class PostgresAdapter extends PostgresAdapterBase {
|
||||||
async init (ctx: MeasureContext, domains?: string[], excludeDomains?: string[]): Promise<void> {
|
async init (
|
||||||
|
ctx: MeasureContext,
|
||||||
|
contextVars: Record<string, any>,
|
||||||
|
domains?: string[],
|
||||||
|
excludeDomains?: string[]
|
||||||
|
): Promise<void> {
|
||||||
|
this.connections = contextVars.cntInfoPG ?? new Map<string, ConnectionInfo>()
|
||||||
|
contextVars.cntInfoPG = this.connections
|
||||||
let resultDomains = domains ?? this.hierarchy.domains()
|
let resultDomains = domains ?? this.hierarchy.domains()
|
||||||
if (excludeDomains !== undefined) {
|
if (excludeDomains !== undefined) {
|
||||||
resultDomains = resultDomains.filter((it) => !excludeDomains.includes(it))
|
resultDomains = resultDomains.filter((it) => !excludeDomains.includes(it))
|
||||||
@ -1977,7 +1990,15 @@ class PostgresAdapter extends PostgresAdapterBase {
|
|||||||
}
|
}
|
||||||
|
|
||||||
class PostgresTxAdapter extends PostgresAdapterBase implements TxAdapter {
|
class PostgresTxAdapter extends PostgresAdapterBase implements TxAdapter {
|
||||||
async init (ctx: MeasureContext, domains?: string[], excludeDomains?: string[]): Promise<void> {
|
async init (
|
||||||
|
ctx: MeasureContext,
|
||||||
|
contextVars: Record<string, any>,
|
||||||
|
domains?: string[],
|
||||||
|
excludeDomains?: string[]
|
||||||
|
): Promise<void> {
|
||||||
|
this.connections = contextVars.cntInfoPG ?? new Map<string, ConnectionInfo>()
|
||||||
|
contextVars.cntInfoPG = this.connections
|
||||||
|
|
||||||
const resultDomains = domains ?? [DOMAIN_TX, DOMAIN_MODEL_TX]
|
const resultDomains = domains ?? [DOMAIN_TX, DOMAIN_MODEL_TX]
|
||||||
await initRateLimit.exec(async () => {
|
await initRateLimit.exec(async () => {
|
||||||
const url = this.refClient.url()
|
const url = this.refClient.url()
|
||||||
@ -2035,12 +2056,13 @@ class PostgresTxAdapter extends PostgresAdapterBase implements TxAdapter {
|
|||||||
*/
|
*/
|
||||||
export async function createPostgresAdapter (
|
export async function createPostgresAdapter (
|
||||||
ctx: MeasureContext,
|
ctx: MeasureContext,
|
||||||
|
contextVars: Record<string, any>,
|
||||||
hierarchy: Hierarchy,
|
hierarchy: Hierarchy,
|
||||||
url: string,
|
url: string,
|
||||||
workspaceId: WorkspaceId,
|
workspaceId: WorkspaceId,
|
||||||
modelDb: ModelDb
|
modelDb: ModelDb
|
||||||
): Promise<DbAdapter> {
|
): Promise<DbAdapter> {
|
||||||
const client = getDBClient(url)
|
const client = getDBClient(contextVars, url)
|
||||||
const connection = await client.getClient()
|
const connection = await client.getClient()
|
||||||
return new PostgresAdapter(connection, client, workspaceId, hierarchy, modelDb, 'default-' + workspaceId.name)
|
return new PostgresAdapter(connection, client, workspaceId, hierarchy, modelDb, 'default-' + workspaceId.name)
|
||||||
}
|
}
|
||||||
@ -2050,12 +2072,13 @@ export async function createPostgresAdapter (
|
|||||||
*/
|
*/
|
||||||
export async function createPostgresTxAdapter (
|
export async function createPostgresTxAdapter (
|
||||||
ctx: MeasureContext,
|
ctx: MeasureContext,
|
||||||
|
contextVars: Record<string, any>,
|
||||||
hierarchy: Hierarchy,
|
hierarchy: Hierarchy,
|
||||||
url: string,
|
url: string,
|
||||||
workspaceId: WorkspaceId,
|
workspaceId: WorkspaceId,
|
||||||
modelDb: ModelDb
|
modelDb: ModelDb
|
||||||
): Promise<TxAdapter> {
|
): Promise<TxAdapter> {
|
||||||
const client = getDBClient(url)
|
const client = getDBClient(contextVars, url)
|
||||||
const connection = await client.getClient()
|
const connection = await client.getClient()
|
||||||
return new PostgresTxAdapter(connection, client, workspaceId, hierarchy, modelDb, 'tx' + workspaceId.name)
|
return new PostgresTxAdapter(connection, client, workspaceId, hierarchy, modelDb, 'tx' + workspaceId.name)
|
||||||
}
|
}
|
||||||
|
@@ -43,15 +43,6 @@ import {
   translateDomain
 } from './schemas'
 
-const connections = new Map<string, PostgresClientReferenceImpl>()
-
-// Register close on process exit.
-process.on('exit', () => {
-  shutdown().catch((err) => {
-    console.error(err)
-  })
-})
-
 const clientRefs = new Map<string, ClientRef>()
 const loadedDomains = new Set<string>()
 

@@ -195,7 +186,12 @@ async function createTable (client: postgres.Sql, domain: string): Promise<void>
 /**
  * @public
 */
-export async function shutdown (): Promise<void> {
+export async function shutdownPostgres (contextVars: Record<string, any>): Promise<void> {
+  const connections: Map<string, PostgresClientReferenceImpl> | undefined =
+    contextVars.pgConnections ?? new Map<string, PostgresClientReferenceImpl>()
+  if (connections === undefined) {
+    return
+  }
   for (const c of connections.values()) {
     c.close(true)
   }

@@ -305,9 +301,16 @@ export function setExtraOptions (options: DBExtraOptions): void {
  * Initialize a workspace connection to DB
  * @public
 */
-export function getDBClient (connectionString: string, database?: string): PostgresClientReference {
+export function getDBClient (
+  contextVars: Record<string, any>,
+  connectionString: string,
+  database?: string
+): PostgresClientReference {
   const extraOptions = JSON.parse(process.env.POSTGRES_OPTIONS ?? '{}')
   const key = `${connectionString}${extraOptions}`
+  const connections = contextVars.pgConnections ?? new Map<string, PostgresClientReferenceImpl>()
+  contextVars.pgConnections = connections
+
   let existing = connections.get(key)
 
   if (existing === undefined) {
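After this rework the Postgres connection cache lives in whatever `contextVars` object the caller supplies (under the `pgConnections` key), so two independent contexts no longer share, or tear down, each other's clients. A sketch of the resulting behavior; the connection string is a placeholder:

// Sketch: each contextVars object carries its own pgConnections map.
const ctxA: Record<string, any> = {}
const ctxB: Record<string, any> = {}

const refA = getDBClient(ctxA, 'postgresql://user:pass@host:5432') // cached in ctxA.pgConnections
const refB = getDBClient(ctxB, 'postgresql://user:pass@host:5432') // separate cache in ctxB

await shutdownPostgres(ctxA) // closes only the clients tracked by ctxA
refB.close() // ctxB's reference is unaffected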
@@ -62,7 +62,15 @@ class StorageBlobAdapter implements DbAdapter {
     }
   }
 
-  init?: ((ctx: MeasureContext, domains?: string[], excludeDomains?: string[]) => Promise<void>) | undefined
+  init?:
+  | ((
+    ctx: MeasureContext,
+    contextVars: Record<string, any>,
+    domains?: string[],
+    excludeDomains?: string[]
+  ) => Promise<void>)
+  | undefined
 
   on?: ((handler: DbAdapterHandler) => void) | undefined
 
   async rawFindAll<T extends Doc>(domain: Domain, query: DocumentQuery<T>, options?: FindOptions<T>): Promise<T[]> {

@@ -130,6 +138,7 @@ class StorageBlobAdapter implements DbAdapter {
 */
 export async function createStorageDataAdapter (
   ctx: MeasureContext,
+  contextVars: Record<string, any>,
   hierarchy: Hierarchy,
   url: string,
   workspaceId: WorkspaceId,
@@ -77,6 +77,12 @@ export function getTxAdapterFactory (
   return adapter.factory
 }
 
+/**
+ * A pipeline context used by standalone services to hold global variables.
+ * In case of Durable Objects, it should not be shared and an individual context should be created.
+ */
+export const sharedPipelineContextVars: Record<string, any> = {}
+
 /**
  * @public
 */
@@ -94,6 +100,7 @@ export function createServerPipeline (
     externalStorage: StorageAdapter
 
     extraLogging?: boolean // If passed, will log every request/etc.
+    pipelineContextVars?: Record<string, any>
   },
   extensions?: Partial<DbConfiguration>
 ): PipelineFactory {

@@ -137,7 +144,8 @@ export function createServerPipeline (
       branding,
       modelDb,
       hierarchy,
-      storageAdapter: opt.externalStorage
+      storageAdapter: opt.externalStorage,
+      contextVars: opt.pipelineContextVars ?? sharedPipelineContextVars
     }
     return createPipeline(ctx, middlewares, context)
   }

@@ -183,7 +191,8 @@ export function createBackupPipeline (
       branding,
       modelDb,
       hierarchy,
-      storageAdapter: opt.externalStorage
+      storageAdapter: opt.externalStorage,
+      contextVars: {}
     }
     return createPipeline(ctx, middlewares, context)
   }
@@ -102,7 +102,7 @@ export class ClientSession implements Session {

   async loadModel (ctx: ClientSessionCtx, lastModelTx: Timestamp, hash?: string): Promise<void> {
     this.includeSessionContext(ctx.ctx, ctx.pipeline)
-    const result = await ctx.ctx.with('load-model', {}, () => ctx.pipeline.loadModel(ctx.ctx, lastModelTx, hash))
+    const result = await ctx.ctx.with('load-model', {}, (_ctx) => ctx.pipeline.loadModel(_ctx, lastModelTx, hash))
     await ctx.sendResponse(ctx.requestId, result)
   }
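This one-line fix matters because `with` hands the operation a child measuring context; using the outer `ctx.ctx` attributed the nested work to the parent span. An illustrative shape of the API (a sketch, not the real definition):

interface MeasureContextLike {
  with: <T>(name: string, params: Record<string, any>, op: (ctx: MeasureContextLike) => T | Promise<T>) => Promise<T>
}

declare const ctx: MeasureContextLike
declare const work: (ctx: MeasureContextLike) => Promise<void>

// Correct: the callback uses the child context it receives, so measurements nest under 'load-model'.
await ctx.with('load-model', {}, (child) => work(child))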
@@ -40,8 +40,18 @@ import { FileModelLogger, prepareTools } from '@hcengineering/server-tool'
 import path from 'path'

 import { Analytics } from '@hcengineering/analytics'
-import { createMongoAdapter, createMongoDestroyAdapter, createMongoTxAdapter } from '@hcengineering/mongo'
-import { createPostgreeDestroyAdapter, createPostgresAdapter, createPostgresTxAdapter } from '@hcengineering/postgres'
+import {
+  createMongoAdapter,
+  createMongoDestroyAdapter,
+  createMongoTxAdapter,
+  shutdownMongo
+} from '@hcengineering/mongo'
+import {
+  createPostgreeDestroyAdapter,
+  createPostgresAdapter,
+  createPostgresTxAdapter,
+  shutdownPostgres
+} from '@hcengineering/postgres'
 import { doBackupWorkspace, doRestoreWorkspace } from '@hcengineering/server-backup'
 import type { PipelineFactory, StorageAdapter } from '@hcengineering/server-core'
 import {
@@ -52,7 +62,8 @@ import {
   registerDestroyFactory,
   registerServerPlugins,
   registerStringLoaders,
-  registerTxAdapterFactory
+  registerTxAdapterFactory,
+  sharedPipelineContextVars
 } from '@hcengineering/server-pipeline'
 import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
 import { createWorkspace, upgradeWorkspace } from './ws-operations'
@@ -72,6 +83,16 @@ export interface WorkspaceOptions {
   }
 }

+// Register close on process exit.
+process.on('exit', () => {
+  shutdownPostgres(sharedPipelineContextVars).catch((err) => {
+    console.error(err)
+  })
+  shutdownMongo(sharedPipelineContextVars).catch((err) => {
+    console.error(err)
+  })
+})
+
 export type WorkspaceOperation = 'create' | 'upgrade' | 'all' | 'all+backup'

 export class WorkspaceWorker {
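One caveat worth noting: Node's 'exit' handlers must be synchronous, so the promises above cannot be awaited before the process dies; the `.catch` only prevents unhandled rejections. If graceful async cleanup is wanted, a common alternative (a sketch under that assumption, not part of this change) hooks termination signals instead, using the same `shutdownPostgres`/`shutdownMongo` imports:

const shutdown = async (): Promise<void> => {
  // Await both closers, then exit explicitly once cleanup is done.
  await shutdownPostgres(sharedPipelineContextVars)
  await shutdownMongo(sharedPipelineContextVars)
  process.exit(0)
}
process.once('SIGINT', () => { void shutdown() })
process.once('SIGTERM', () => { void shutdown() })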
@@ -348,7 +369,7 @@ export class WorkspaceWorker {
   async doCleanup (ctx: MeasureContext, workspace: BaseWorkspaceInfo): Promise<void> {
     const { dbUrl } = prepareTools([])
     const adapter = getWorkspaceDestroyAdapter(dbUrl)
-    await adapter.deleteWorkspace(ctx, { name: workspace.workspace })
+    await adapter.deleteWorkspace(ctx, sharedPipelineContextVars, { name: workspace.workspace })
   }

   private async doWorkspaceOperation (
@@ -500,6 +521,7 @@ export class WorkspaceWorker {
         archive,
         50000,
         ['blob'],
+        sharedPipelineContextVars,
         (_p: number) => {
           if (progress !== Math.round(_p)) {
             progress = Math.round(_p)
@@ -16,7 +16,7 @@ import core, {
 import { consoleModelLogger, type MigrateOperation, type ModelLogger } from '@hcengineering/model'
 import { getTransactorEndpoint } from '@hcengineering/server-client'
 import { SessionDataImpl, wrapPipeline, type Pipeline, type StorageAdapter } from '@hcengineering/server-core'
-import { getServerPipeline, getTxAdapterFactory } from '@hcengineering/server-pipeline'
+import { getServerPipeline, getTxAdapterFactory, sharedPipelineContextVars } from '@hcengineering/server-pipeline'
 import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
 import { generateToken } from '@hcengineering/server-token'
 import { initializeWorkspace, initModel, prepareTools, updateModel, upgradeModel } from '@hcengineering/server-tool'
@@ -82,7 +82,7 @@ export async function createWorkspace (
     externalStorage: storageAdapter,
     usePassedCtx: true
   })
-  const txAdapter = await txFactory(ctx, hierarchy, dbUrl, wsId, modelDb, storageAdapter)
+  const txAdapter = await txFactory(ctx, sharedPipelineContextVars, hierarchy, dbUrl, wsId, modelDb, storageAdapter)
   await childLogger.withLog('init-workspace', {}, (ctx) =>
     initModel(ctx, wsId, txes, txAdapter, storageAdapter, ctxModellogger, async (value) => {})
   )
@@ -54,10 +54,10 @@ import { CloudFlareLogger } from './logger'
 import model from './model.json'
 // import { configureAnalytics } from '@hcengineering/analytics-service'
 // import { Analytics } from '@hcengineering/analytics'
+import contactPlugin from '@hcengineering/contact'
 import serverAiBot from '@hcengineering/server-ai-bot'
 import serverNotification from '@hcengineering/server-notification'
 import serverTelegram from '@hcengineering/server-telegram'
-import contactPlugin from '@hcengineering/contact'

 export const PREFERRED_SAVE_SIZE = 500
 export const PREFERRED_SAVE_INTERVAL = 30 * 1000
@@ -75,6 +75,8 @@ export class Transactor extends DurableObject<Env> {

   private readonly sessions = new Map<WebSocket, WebsocketData>()

+  private readonly contextVars: Record<string, any> = {}
+
   constructor (ctx: DurableObjectState, env: Env) {
     super(ctx, env)

@@ -135,11 +137,12 @@ export class Transactor extends DurableObject<Env> {
       adapterSecurity: false,
       disableTriggers: false,
       fulltextUrl: env.FULLTEXT_URL,
-      extraLogging: true
+      extraLogging: true,
+      pipelineContextVars: this.contextVars
     })
     const result = await pipeline(ctx, ws, upgrade, broadcast, branding)

-    const client = getDBClient(dbUrl)
+    const client = getDBClient(this.contextVars, dbUrl)
     const connection = await client.getClient()
     const t1 = Date.now()
     await connection`select now()`
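Because `contextVars` is created per Durable Object instance, the client that `getDBClient` caches there is reused across requests handled by the same object but never shared between objects, which is exactly the isolation the `sharedPipelineContextVars` doc comment calls for. A hedged usage sketch, assuming the `getDBClient` import from the hunk's file and a `close` method on the reference-counted client:

// Sketch of a method body on the Transactor above; `contextVars` is the
// per-instance field, so the cached client stays private to this object.
async function probe (contextVars: Record<string, any>, dbUrl: string): Promise<void> {
  const client = getDBClient(contextVars, dbUrl) // cached in this instance's contextVars
  const connection = await client.getClient()
  await connection`select now()` // same health probe as in the hunk
  client.close() // assumed release of the reference-counted client
}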