UBERF-9158: Use parameters in SQL queries (#7690)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Andrey Sobolev, 2025-01-17 17:23:13 +07:00, committed by GitHub
parent 08e4ee5ec5
commit f1849628a8
GPG Key ID: B5690EEEBB952194
18 changed files with 26582 additions and 24984 deletions

File diff suppressed because it is too large.

View File

@ -48,5 +48,5 @@
"UPLOAD_CONFIG": "",
"UPLOAD_URL": "https://dl.hc.engineering/upload/form-data/:workspace",
"TRANSACTOR_OVERRIDE": "wss://cloud-transactor.andrey-47f.workers.dev"
"TRANSACTOR_OVERRIDE": "wss://transactor00.hc.engineering"
}

View File

@ -6,4 +6,4 @@ COPY bundle/bundle.js ./
COPY bundle/bundle.js.map ./
EXPOSE 8080
CMD node bundle.js
CMD [ "node", "./bundle.js" ]

View File

@ -122,7 +122,7 @@ const close = (): void => {
}
process.on('unhandledRejection', (reason, promise) => {
metricsContext.error('Unhandled Rejection at:', { origin, promise })
metricsContext.error('Unhandled Rejection at:', { reason, promise })
})
global.process.on('uncaughtException', (error, origin) => {
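
For context: 'unhandledRejection' callbacks receive only (reason, promise); an origin argument exists just on 'uncaughtException' handlers, so the old line logged a name that was never in scope. A sketch of the two Node.js signatures (types per @types/node):

```ts
process.on('unhandledRejection', (reason: unknown, promise: Promise<unknown>) => {
  // no `origin` here — referencing it was the bug this hunk fixes
})
process.on('uncaughtException', (error: Error, origin: string) => {
  // `origin` is provided only for this event
})
```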

View File

@ -37,7 +37,12 @@ import {
} from '@hcengineering/server-pipeline'
import { createMongoAdapter, createMongoDestroyAdapter, createMongoTxAdapter } from '@hcengineering/mongo'
import { createPostgreeDestroyAdapter, createPostgresAdapter, createPostgresTxAdapter } from '@hcengineering/postgres'
import {
createPostgreeDestroyAdapter,
createPostgresAdapter,
createPostgresTxAdapter,
setDbUnsafePrepareOptions
} from '@hcengineering/postgres'
import { readFileSync } from 'node:fs'
const model = JSON.parse(readFileSync(process.env.MODEL_JSON ?? 'model.json').toString()) as Tx[]
@ -76,6 +81,15 @@ export function start (
registerAdapterFactory('postgresql', createPostgresAdapter, true)
registerDestroyFactory('postgresql', createPostgreeDestroyAdapter, true)
const usePrepare = process.env.DB_PREPARE === 'true'
setDbUnsafePrepareOptions({
find: usePrepare,
model: false,
update: usePrepare,
upload: usePrepare
})
registerServerPlugins()
const externalStorage = buildStorageFromConfig(opt.storageConfig)
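
A minimal sketch of what the flag feeds into, assuming the postgres.js client used throughout this commit: unsafe() takes the SQL text, a parameter array, and per-query options, where prepare toggles named prepared statements.

```ts
// Sketch: DB_PREPARE=true opts find/update/upload queries into prepared
// statements; model queries stay unprepared regardless (model: false above).
const usePrepare = process.env.DB_PREPARE === 'true'
const wsId = '0a1b2c3d-0000-4000-8000-000000000000' // example workspace uuid
await client.unsafe('SELECT * FROM tx WHERE "workspaceId" = $1::uuid', [wsId], {
  prepare: usePrepare
})
```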

View File

@ -19,7 +19,7 @@ import { getDBClient, retryTxn } from './utils'
export { getDocFieldsByDomains, translateDomain } from './schemas'
export * from './storage'
export { convertDoc, createTables, getDBClient, retryTxn } from './utils'
export { convertDoc, createTables, getDBClient, retryTxn, setDBExtraOptions, setDbUnsafePrepareOptions } from './utils'
export function createPostgreeDestroyAdapter (url: string): WorkspaceDestroyAdapter {
return {
@ -33,7 +33,9 @@ export function createPostgreeDestroyAdapter (url: string): WorkspaceDestroyAdap
for (const [domain] of Object.entries(domainSchemas)) {
await ctx.with('delete-workspace-domain', {}, async () => {
await retryTxn(connection, async (client) => {
await client`delete from ${connection(domain)} where "workspaceId" = '${connection(workspace.uuid ?? workspace.name)}'`
await client.unsafe(`delete from ${domain} where "workspaceId" = $1::uuid`, [
workspace.uuid ?? workspace.name
])
})
})
}
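
Note the division of labor here: Postgres bind parameters can carry values only, so the table name stays interpolated (it comes from the trusted domainSchemas map), while the workspace id moves into $1. A hedged illustration with a purely hypothetical value:

```ts
// A value containing quote characters is now sent out-of-band as a bind
// parameter and never becomes part of the SQL text. `client` is the same
// postgres.js connection as above.
const suspicious = "x' OR '1'='1"
await client.unsafe('DELETE FROM tx WHERE "attachedTo" = $1::text', [suspicious])
```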

View File

@ -80,7 +80,7 @@ import {
createTables,
DBCollectionHelper,
type DBDoc,
escapeBackticks,
dbUnsafePrepareOptions,
getDBClient,
inferType,
isDataField,
@ -96,10 +96,11 @@ import {
async function * createCursorGenerator (
client: postgres.ReservedSql,
sql: string,
params: any,
schema: Schema,
bulkSize = 1000
): AsyncGenerator<Doc[]> {
const cursor = client.unsafe(sql).cursor(bulkSize)
const cursor = client.unsafe(sql, params).cursor(bulkSize)
try {
let docs: Doc[] = []
for await (const part of cursor) {
@ -240,7 +241,7 @@ class ConnectionMgr {
}
}
async read (id: string | undefined, fn: (client: postgres.Sql | postgres.ReservedSql) => Promise<any>): Promise<any> {
async retry (id: string | undefined, fn: (client: postgres.Sql | postgres.ReservedSql) => Promise<any>): Promise<any> {
const backoffInterval = 25 // millis
const maxTries = 5
let tries = 0
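
The rename from read to retry matches how the method is now used: writes are routed through it as well. A self-contained sketch of the loop implied by backoffInterval and maxTries (linear backoff is an assumption):

```ts
async function withRetry<T> (fn: () => Promise<T>, maxTries = 5, backoffInterval = 25): Promise<T> {
  let tries = 0
  while (true) {
    try {
      return await fn()
    } catch (err) {
      if (++tries === maxTries) throw err
      // back off a little longer on each failed attempt
      await new Promise((resolve) => setTimeout(resolve, backoffInterval * tries))
    }
  }
}
```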
@ -316,6 +317,50 @@ class ConnectionMgr {
}
}
class ValuesVariables {
index: number = 1
values: any[] = []
valueHashes = new Map<string, string>()
add (value: any, type: string = ''): string {
// De-duplicate strings: an identical (value, type) pair reuses its placeholder
if (typeof value === 'string') {
const v = this.valueHashes.get(value + ':' + type)
if (v !== undefined) {
return v
}
this.values.push(value)
const idx = type !== '' ? `$${this.index++}${type}` : `$${this.index++}`
this.valueHashes.set(value + ':' + type, idx)
return idx
} else {
this.values.push(value)
return type !== '' ? `$${this.index++}${type}` : `$${this.index++}`
}
}
getValues (): any[] {
return this.values
}
addArray (value: any[], type: string = ''): string {
return this.add(
value.filter((it) => it != null),
type
)
}
addArrayI (value: any[], type: string = ''): string[] {
const vals = value.filter((it) => it != null)
const vv: string[] = []
vals.forEach((it) => {
vv.push(this.add(it, type))
})
return vv
}
}
abstract class PostgresAdapterBase implements DbAdapter {
protected readonly _helper: DBCollectionHelper
protected readonly tableFields = new Map<string, string[]>()
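
How ValuesVariables is used across the adapter, sketched: add() appends a value and returns its $n placeholder (optionally cast), and an identical string with the same cast reuses its slot.

```ts
const vars = new ValuesVariables()
const ws = vars.add('ws-uuid', '::uuid') // '$1::uuid'
const cls = vars.add('core:class:Tx') // '$2'
const clsAgain = vars.add('core:class:Tx') // '$2' again — de-duplicated
const where = `"workspaceId" = ${ws} AND _class = ${cls}`
await client.unsafe(`SELECT * FROM tx WHERE ${where}`, vars.getValues())
// vars.getValues() === ['ws-uuid', 'core:class:Tx']
```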
@ -358,8 +403,9 @@ abstract class PostgresAdapterBase implements DbAdapter {
const tdomain = translateDomain(_domain)
const vars = new ValuesVariables()
const sqlChunks: string[] = [`SELECT * FROM ${tdomain}`]
sqlChunks.push(`WHERE ${this.buildRawQuery(tdomain, query, options)}`)
sqlChunks.push(`WHERE ${this.buildRawQuery(vars, tdomain, query, options)}`)
if (options?.sort !== undefined) {
sqlChunks.push(this.buildRawOrder(tdomain, options.sort))
}
@ -368,7 +414,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
}
const finalSql: string = sqlChunks.join(' ')
const cursor: AsyncGenerator<Doc[]> = createCursorGenerator(client, finalSql, schema)
const cursor: AsyncGenerator<Doc[]> = createCursorGenerator(client, finalSql, vars.getValues(), schema)
return {
next: async (count: number): Promise<T[] | null> => {
const result = await cursor.next()
@ -399,9 +445,10 @@ abstract class PostgresAdapterBase implements DbAdapter {
async rawFindAll<T extends Doc>(_domain: Domain, query: DocumentQuery<T>, options?: FindOptions<T>): Promise<T[]> {
const domain = translateDomain(_domain)
const vars = new ValuesVariables()
const select = `SELECT ${this.getProjection(domain, options?.projection, [], options?.associations)} FROM ${domain}`
const sqlChunks: string[] = []
sqlChunks.push(`WHERE ${this.buildRawQuery(domain, query, options)}`)
sqlChunks.push(`WHERE ${this.buildRawQuery(vars, domain, query, options)}`)
if (options?.sort !== undefined) {
sqlChunks.push(this.buildRawOrder(domain, options.sort))
}
@ -409,7 +456,11 @@ abstract class PostgresAdapterBase implements DbAdapter {
sqlChunks.push(`LIMIT ${options.limit}`)
}
const finalSql: string = [select, ...sqlChunks].join(' ')
const result: DBDoc[] = await this.mgr.read(undefined, (client) => client.unsafe(finalSql))
const result: DBDoc[] = await this.mgr.retry(undefined, (client) =>
client.unsafe(finalSql, vars.getValues(), {
prepare: dbUnsafePrepareOptions.find
})
)
return result.map((p) => parseDocWithProjection(p, domain, options?.projection))
}
@ -429,13 +480,18 @@ abstract class PostgresAdapterBase implements DbAdapter {
return `ORDER BY ${res.join(', ')}`
}
buildRawQuery<T extends Doc>(domain: string, query: DocumentQuery<T>, options?: FindOptions<T>): string {
buildRawQuery<T extends Doc>(
vars: ValuesVariables,
domain: string,
query: DocumentQuery<T>,
options?: FindOptions<T>
): string {
const res: string[] = []
res.push(`"workspaceId" = '${this.workspaceId.name}'`)
res.push(`"workspaceId" = ${vars.add(this.workspaceId.name, '::uuid')}`)
for (const key in query) {
const value = query[key]
const tkey = this.transformKey(domain, core.class.Doc, key, false)
const translated = this.translateQueryValue(tkey, value, 'common')
const translated = this.translateQueryValue(vars, tkey, value, 'common')
if (translated !== undefined) {
res.push(translated)
}
@ -448,7 +504,8 @@ abstract class PostgresAdapterBase implements DbAdapter {
query: DocumentQuery<T>,
operations: DocumentUpdate<T>
): Promise<void> {
const translatedQuery = this.buildRawQuery(domain, query)
const vars = new ValuesVariables()
const translatedQuery = this.buildRawQuery(vars, domain, query)
if ((operations as any).$set !== undefined) {
;(operations as any) = { ...(operations as any).$set }
}
@ -459,7 +516,13 @@ abstract class PostgresAdapterBase implements DbAdapter {
const schemaFields = getSchemaAndFields(domain)
if (isOps) {
await this.mgr.write(undefined, async (client) => {
const res = await client.unsafe(`SELECT * FROM ${translateDomain(domain)} WHERE ${translatedQuery} FOR UPDATE`)
const res = await client.unsafe(
`SELECT * FROM ${translateDomain(domain)} WHERE ${translatedQuery} FOR UPDATE`,
vars.getValues(),
{
prepare: dbUnsafePrepareOptions.find
}
)
const docs = res.map((p) => parseDoc(p as any, schemaFields.schema))
for (const doc of docs) {
if (doc === undefined) continue
@ -467,8 +530,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
TxProcessor.applyUpdate(doc, operations)
;(doc as any)['%hash%'] = this.curHash()
const converted = convertDoc(domain, doc, this.workspaceId.name, schemaFields)
const params: any[] = [doc._id, this.workspaceId.name]
let paramsIndex = params.length + 1
const params = new ValuesVariables()
const updates: string[] = []
const { extractedFields, remainingData } = parseUpdate(operations, schemaFields)
const newAttachedTo = (doc as any).attachedTo
@ -476,21 +538,24 @@ abstract class PostgresAdapterBase implements DbAdapter {
for (const key in extractedFields) {
const val = (extractedFields as any)[key]
if (key === 'attachedTo' && val === prevAttachedTo) continue
updates.push(`"${key}" = $${paramsIndex++}`)
params.push(val)
updates.push(`"${key}" = ${params.add(val)}`)
}
} else if (prevAttachedTo !== undefined && prevAttachedTo !== newAttachedTo) {
updates.push(`"attachedTo" = $${paramsIndex++}`)
params.push(newAttachedTo)
updates.push(`"attachedTo" = ${params.add(newAttachedTo)}`)
}
if (Object.keys(remainingData).length > 0) {
updates.push(`data = $${paramsIndex++}`)
params.push(converted.data)
updates.push(`data = ${params.add(converted.data, '::json')}`)
}
await client.unsafe(
`UPDATE ${translateDomain(domain)} SET ${updates.join(', ')} WHERE "workspaceId" = $2 AND _id = $1`,
params
`UPDATE ${translateDomain(domain)}
SET ${updates.join(', ')}
WHERE "workspaceId" = ${params.add(this.workspaceId.name, '::uuid')}
AND _id = ${params.add(doc._id, '::text')}`,
params.getValues(),
{
prepare: dbUnsafePrepareOptions.update
}
)
}
})
@ -505,40 +570,43 @@ abstract class PostgresAdapterBase implements DbAdapter {
operations: DocumentUpdate<T>,
schemaFields: SchemaAndFields
): Promise<void> {
const translatedQuery = this.buildRawQuery(domain, query)
const vars = new ValuesVariables()
const translatedQuery = this.buildRawQuery(vars, domain, query)
const updates: string[] = []
const params: any[] = []
let paramsIndex = params.length + 1
const { extractedFields, remainingData } = parseUpdate(operations, schemaFields)
const { space, attachedTo, ...ops } = operations as any
for (const key in extractedFields) {
updates.push(`"${key}" = $${paramsIndex++}`)
params.push((extractedFields as any)[key])
updates.push(`"${key}" = ${vars.add((extractedFields as any)[key])}`)
}
let from = 'data'
let dataUpdated = false
for (const key in remainingData) {
if (ops[key] === undefined) continue
const val = (remainingData as any)[key]
from = `jsonb_set(${from}, '{${key}}', coalesce(to_jsonb($${paramsIndex++}${inferType(val)}), 'null') , true)`
params.push(val)
from = `jsonb_set(${from}, '{${key}}', coalesce(to_jsonb(${vars.add(val)}${inferType(val)}), 'null') , true)`
dataUpdated = true
}
if (dataUpdated) {
updates.push(`data = ${from}`)
}
await this.mgr.write(undefined, async (client) => {
await this.mgr.retry(undefined, async (client) => {
await client.unsafe(
`UPDATE ${translateDomain(domain)} SET ${updates.join(', ')} WHERE ${translatedQuery}`,
params
`UPDATE ${translateDomain(domain)} SET ${updates.join(', ')} WHERE ${translatedQuery};`,
vars.getValues(),
{
prepare: dbUnsafePrepareOptions.find
}
)
})
}
async rawDeleteMany<T extends Doc>(domain: Domain, query: DocumentQuery<T>): Promise<void> {
const translatedQuery = this.buildRawQuery(domain, query)
await this.mgr.write(undefined, async (client) => {
await client.unsafe(`DELETE FROM ${translateDomain(domain)} WHERE ${translatedQuery}`)
const vars = new ValuesVariables()
const translatedQuery = this.buildRawQuery(vars, domain, query)
await this.mgr.retry(undefined, async (client) => {
await client.unsafe(`DELETE FROM ${translateDomain(domain)} WHERE ${translatedQuery}`, vars.getValues(), {
prepare: dbUnsafePrepareOptions.update
})
})
}
@ -549,6 +617,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
options?: ServerFindOptions<T>
): Promise<FindResult<T>> {
let fquery = ''
const vars = new ValuesVariables()
return ctx.with(
'findAll',
{},
@ -556,6 +625,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
try {
const domain = translateDomain(options?.domain ?? this.hierarchy.getDomain(_class))
const sqlChunks: string[] = []
const joins = this.buildJoin(_class, options?.lookup)
if (options?.domainLookup !== undefined) {
const baseDomain = translateDomain(this.hierarchy.getDomain(_class))
@ -574,15 +644,17 @@ abstract class PostgresAdapterBase implements DbAdapter {
toClass: undefined
})
}
// Add workspace name as $1
const select = `SELECT ${this.getProjection(domain, options?.projection, joins, options?.associations)} FROM ${domain}`
const secJoin = this.addSecurity(query, domain, ctx.contextData)
const secJoin = this.addSecurity(vars, query, domain, ctx.contextData)
if (secJoin !== undefined) {
sqlChunks.push(secJoin)
}
if (joins.length > 0) {
sqlChunks.push(this.buildJoinString(joins))
}
sqlChunks.push(`WHERE ${this.buildQuery(_class, domain, query, joins, options)}`)
sqlChunks.push(`WHERE ${this.buildQuery(vars, _class, domain, query, joins, options)}`)
const totalSqlChunks = [...sqlChunks]
@ -593,19 +665,23 @@ abstract class PostgresAdapterBase implements DbAdapter {
sqlChunks.push(`LIMIT ${options.limit}`)
}
return (await this.mgr.read(ctx.id, async (connection) => {
return (await this.mgr.retry(ctx.id, async (connection) => {
let total = options?.total === true ? 0 : -1
if (options?.total === true) {
const totalReq = `SELECT COUNT(${domain}._id) as count FROM ${domain}`
const totalSql = [totalReq, ...totalSqlChunks].join(' ')
const totalResult = await connection.unsafe(totalSql)
const totalResult = await connection.unsafe(totalSql, vars.getValues(), {
prepare: dbUnsafePrepareOptions.find
})
const parsed = Number.parseInt(totalResult[0].count)
total = Number.isNaN(parsed) ? 0 : parsed
}
const finalSql: string = [select, ...sqlChunks].join(' ')
fquery = finalSql
const result = await connection.unsafe(finalSql)
const result = await connection.unsafe(finalSql, vars.getValues(), {
prepare: dbUnsafePrepareOptions.find
})
if (
options?.lookup === undefined &&
options?.domainLookup === undefined &&
@ -625,11 +701,16 @@ abstract class PostgresAdapterBase implements DbAdapter {
throw err
}
},
() => ({ fquery })
() => ({ fquery, vars: vars.getValues() })
)
}
addSecurity<T extends Doc>(query: DocumentQuery<T>, domain: string, sessionContext: SessionData): string | undefined {
addSecurity<T extends Doc>(
vars: ValuesVariables,
query: DocumentQuery<T>,
domain: string,
sessionContext: SessionData
): string | undefined {
if (sessionContext !== undefined && sessionContext.isTriggerCtx !== true) {
if (sessionContext.admin !== true && sessionContext.account !== undefined) {
const acc = sessionContext.account
@ -641,7 +722,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
const key = domain === DOMAIN_SPACE ? '_id' : domain === DOMAIN_TX ? "data ->> 'objectSpace'" : 'space'
const privateCheck = domain === DOMAIN_SPACE ? ' OR sec.private = false' : ''
const q = `(sec.members @> '{"${acc._id}"}' OR sec."_class" = '${core.class.SystemSpace}'${privateCheck})`
return `INNER JOIN ${translateDomain(DOMAIN_SPACE)} AS sec ON sec._id = ${domain}.${key} AND sec."workspaceId" = '${this.workspaceId.name}' AND ${q}`
return `INNER JOIN ${translateDomain(DOMAIN_SPACE)} AS sec ON sec._id = ${domain}.${key} AND sec."workspaceId" = ${vars.add(this.workspaceId.name, '::uuid')} AND ${q}`
}
}
}
@ -962,6 +1043,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
}
private buildQuery<T extends Doc>(
vars: ValuesVariables,
_class: Ref<Class<T>>,
baseDomain: string,
_query: DocumentQuery<T>,
@ -970,7 +1052,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
): string {
const res: string[] = []
const query = { ..._query }
res.push(`${baseDomain}."workspaceId" = '${this.workspaceId.name}'`)
res.push(`${baseDomain}."workspaceId" = ${vars.add(this.workspaceId.name, '::uuid')}`)
if (options?.skipClass !== true) {
query._class = this.fillClass(_class, query) as any
}
@ -985,7 +1067,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
if (value === undefined) continue
const valueType = this.getValueType(_class, key)
const tkey = this.getKey(_class, baseDomain, key, joins, valueType === 'dataArray')
const translated = this.translateQueryValue(tkey, value, valueType)
const translated = this.translateQueryValue(vars, tkey, value, valueType)
if (translated !== undefined) {
res.push(translated)
}
@ -1145,76 +1227,84 @@ abstract class PostgresAdapterBase implements DbAdapter {
return key
}
private translateQueryValue (tkey: string, value: any, type: ValueType): string | undefined {
private translateQueryValue (vars: ValuesVariables, tkey: string, value: any, type: ValueType): string | undefined {
const tkeyData = tkey.includes('data->') || tkey.includes('data#>>')
if (tkeyData && (Array.isArray(value) || (typeof value !== 'object' && typeof value !== 'string'))) {
value = Array.isArray(value) ? value.map((it) => (it == null ? null : `${it}`)) : `${value}`
}
if (value === null) {
return `${tkey} IS NULL`
} else if (typeof value === 'object' && !Array.isArray(value)) {
// we can have multiple criteria for one field
const res: string[] = []
for (const operator in value) {
const val = value[operator]
let val = value[operator]
if (tkeyData && (Array.isArray(val) || (typeof val !== 'object' && typeof val !== 'string'))) {
val = Array.isArray(val) ? val.map((it) => (it == null ? null : `${it}`)) : `${val}`
}
switch (operator) {
case '$ne':
if (val === null) {
res.push(`${tkey} IS NOT NULL`)
} else {
res.push(`${tkey} != '${val}'`)
res.push(`${tkey} != ${vars.add(val, inferType(val))}`)
}
break
case '$gt':
res.push(`${tkey} > '${val}'`)
res.push(`${tkey} > ${vars.add(val, inferType(val))}`)
break
case '$gte':
res.push(`${tkey} >= '${val}'`)
res.push(`${tkey} >= ${vars.add(val, inferType(val))}`)
break
case '$lt':
res.push(`${tkey} < '${val}'`)
res.push(`${tkey} < ${vars.add(val, inferType(val))}`)
break
case '$lte':
res.push(`${tkey} <= '${val}'`)
res.push(`${tkey} <= ${vars.add(val, inferType(val))}`)
break
case '$in':
switch (type) {
case 'common':
if (Array.isArray(val) && val.includes(null)) {
res.push(
`(${tkey} IN (${val
.filter((it) => it != null)
.map((v: any) => `'${v}'`)
.join(', ')}) OR ${tkey} IS NULL)`
)
res.push(`(${tkey} = ANY(${vars.addArray(val, inferType(val))}) OR ${tkey} IS NULL)`)
} else {
res.push(`${tkey} IN (${val.length > 0 ? val.map((v: any) => `'${v}'`).join(', ') : 'NULL'})`)
if (val.length > 0) {
res.push(`${tkey} = ANY(${vars.addArray(val, inferType(val))})`)
} else {
res.push(`${tkey} IN (NULL)`)
}
}
break
case 'array':
res.push(`${tkey} && array[${val.length > 0 ? val.map((v: any) => `'${v}'`).join(', ') : 'NULL'}]`)
{
const vv = vars.addArrayI(val)
res.push(`${tkey} && array[${vv.length > 0 ? vv.join(', ') : 'NULL'}]`)
}
break
case 'dataArray':
res.push(`${tkey} ?| array[${val.length > 0 ? val.map((v: any) => `'${v}'`).join(', ') : 'NULL'}]`)
{
const vv = vars.addArrayI(val)
res.push(`${tkey} ?| array[${vv.length > 0 ? vv.join(', ') : 'NULL'}]`)
}
break
}
break
case '$nin':
if (Array.isArray(val) && val.includes(null)) {
res.push(
`(${tkey} NOT IN (${val
.filter((it) => it != null)
.map((v: any) => `'${v}'`)
.join(', ')}) AND ${tkey} IS NOT NULL)`
)
} else if (val.length > 0) {
res.push(`${tkey} NOT IN (${val.map((v: any) => `'${v}'`).join(', ')})`)
res.push(`(${tkey} != ALL(${vars.addArray(val, inferType(val))}) AND ${tkey} IS NOT NULL)`)
} else if (Array.isArray(val) && val.length > 0) {
res.push(`${tkey} != ALL(${vars.addArray(val, inferType(val))})`)
}
break
case '$like':
res.push(`${tkey} ILIKE '${escapeBackticks(val)}'`)
res.push(`${tkey} ILIKE ${vars.add(val, inferType(val))}`)
break
case '$exists':
res.push(`${tkey} IS ${val === true ? 'NOT NULL' : 'NULL'}`)
res.push(`${tkey} IS ${val === true || val === 'true' ? 'NOT NULL' : 'NULL'}`)
break
case '$regex':
res.push(`${tkey} SIMILAR TO '${escapeBackticks(val)}'`)
res.push(`${tkey} SIMILAR TO ${vars.add(val, inferType(val))}`)
break
case '$options':
break
@ -1228,8 +1318,9 @@ abstract class PostgresAdapterBase implements DbAdapter {
}
return res.length === 0 ? undefined : res.join(' AND ')
}
return type === 'common'
? `${tkey} = '${escapeBackticks(value)}'`
? `${tkey} = ${vars.add(value, inferType(value))}`
: type === 'array'
? `${tkey} @> '${typeof value === 'string' ? '{"' + value + '"}' : value}'`
: `${tkey} @> '${typeof value === 'string' ? '"' + value + '"' : value}'`
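
What the rewritten operator translation emits, sketched for two representative filters (the ::text[] cast is passed explicitly here instead of relying on inferType):

```ts
const vars = new ValuesVariables()
// { space: { $in: ['s1', 's2'] } } with type 'common':
const inClause = `space = ANY(${vars.addArray(['s1', 's2'], '::text[]')})`
// -> 'space = ANY($1::text[])', vars.getValues() === [['s1', 's2']]
// { space: { $nin: ['s1', null] } } would yield
// '(space != ALL($2::text[]) AND space IS NOT NULL)' — nulls are filtered out by addArray.
```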
@ -1367,7 +1458,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
function createBulk (projection: string, limit = 50000): AsyncGenerator<Doc[]> {
const sql = `SELECT ${projection} FROM ${tdomain} WHERE "workspaceId" = '${workspaceId.name}'`
return createCursorGenerator(client, sql, schema, limit)
return createCursorGenerator(client, sql, undefined, schema, limit)
}
let bulk: AsyncGenerator<Doc[]>
@ -1408,15 +1499,21 @@ abstract class PostgresAdapterBase implements DbAdapter {
return []
}
return await this.mgr.read('', async (client) => {
const res =
await client`SELECT * FROM ${client(translateDomain(domain))} WHERE "workspaceId" = ${this.workspaceId.name} AND _id = ANY(${docs})`
return await this.mgr.retry('', async (client) => {
const res = await client.unsafe(
`SELECT * FROM ${translateDomain(domain)}
WHERE "workspaceId" = $1::uuid AND _id = ANY($2::text[])`,
[this.workspaceId.name, docs],
{
prepare: dbUnsafePrepareOptions.find
}
)
return res.map((p) => parseDocWithProjection(p as any, domain))
})
})
}
upload (ctx: MeasureContext, domain: Domain, docs: Doc[]): Promise<void> {
upload (ctx: MeasureContext, domain: Domain, docs: Doc[], handleConflicts: boolean = true): Promise<void> {
return ctx.with('upload', { domain }, async (ctx) => {
const schemaFields = getSchemaAndFields(domain)
const fieldsWithData = [...schemaFields.fields, 'data']
@ -1424,7 +1521,9 @@ abstract class PostgresAdapterBase implements DbAdapter {
const onConflict: string[] = []
for (const field of fieldsWithData) {
insertFields.push(`"${field}"`)
onConflict.push(`"${field}" = EXCLUDED."${field}"`)
if (handleConflicts) {
onConflict.push(`"${field}" = EXCLUDED."${field}"`)
}
}
const insertStr = insertFields.join(', ')
const onConflictStr = onConflict.join(', ')
@ -1434,9 +1533,9 @@ abstract class PostgresAdapterBase implements DbAdapter {
const tdomain = translateDomain(domain)
while (toUpload.length > 0) {
const part = toUpload.splice(0, 200)
const values: any[] = []
const values = new ValuesVariables()
const vars: string[] = []
let index = 1
const wsId = values.add(this.workspaceId.name, '::uuid')
for (let i = 0; i < part.length; i++) {
const doc = part[i]
const variables: string[] = []
@ -1445,28 +1544,41 @@ abstract class PostgresAdapterBase implements DbAdapter {
;(doc as any)['%hash%'] = this.curHash() // We need to set current hash
}
const d = convertDoc(domain, doc, this.workspaceId.name, schemaFields)
values.push(d.workspaceId)
variables.push(`$${index++}`)
variables.push(wsId)
for (const field of schemaFields.fields) {
values.push(d[field])
variables.push(`$${index++}`)
variables.push(values.add(d[field], `::${schemaFields.schema[field].type}`))
}
values.push(d.data)
variables.push(`$${index++}`)
variables.push(values.add(d.data, '::json'))
vars.push(`(${variables.join(', ')})`)
}
const vals = vars.join(',')
await this.mgr.write(
ctx.id,
async (client) =>
await client.unsafe(
`INSERT INTO ${tdomain} ("workspaceId", ${insertStr}) VALUES ${vals}
ON CONFLICT ("workspaceId", _id) DO UPDATE SET ${onConflictStr};`,
values
)
)
if (handleConflicts) {
await this.mgr.retry(
ctx.id,
async (client) =>
await client.unsafe(
`INSERT INTO ${tdomain} ("workspaceId", ${insertStr}) VALUES ${vals}
ON CONFLICT ("workspaceId", _id) DO UPDATE SET ${onConflictStr};`,
values.getValues(),
{
prepare: dbUnsafePrepareOptions.upload
}
)
)
} else {
await this.mgr.retry(
ctx.id,
async (client) =>
await client.unsafe(
`INSERT INTO ${tdomain} ("workspaceId", ${insertStr}) VALUES ${vals};`,
values.getValues(),
{
prepare: dbUnsafePrepareOptions.upload
}
)
)
}
}
} catch (err: any) {
ctx.error('failed to upload', { err })
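
The payoff of routing rows through ValuesVariables, sketched for a two-document batch: the workspace id placeholder is created once and reused in every row tuple. (Further down, insert() becomes a thin wrapper that calls this upload with handleConflicts = false to skip the ON CONFLICT clause.)

```ts
// Column list abridged to ("workspaceId", _id) for the sketch.
const values = new ValuesVariables()
const wsId = values.add('ws-uuid', '::uuid') // '$1::uuid', shared by every row
const row1 = `(${wsId}, ${values.add('doc-1', '::text')})` // '($1::uuid, $2::text)'
const row2 = `(${wsId}, ${values.add('doc-2', '::text')})` // '($1::uuid, $3::text)'
const sql = `INSERT INTO task ("workspaceId", _id) VALUES ${row1},${row2}`
await client.unsafe(sql, values.getValues(), { prepare: true })
```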
@ -1481,10 +1593,14 @@ abstract class PostgresAdapterBase implements DbAdapter {
while (toClean.length > 0) {
const part = toClean.splice(0, 2500)
await ctx.with('clean', {}, () => {
return this.mgr.write(
ctx.id,
(client) =>
client`DELETE FROM ${client(tdomain)} WHERE "workspaceId" = ${this.workspaceId.name} AND _id = ANY(${part})`
return this.mgr.retry(ctx.id, (client) =>
client.unsafe(
`DELETE FROM ${tdomain} WHERE "workspaceId" = $1 AND _id = ANY($2::text[])`,
[this.workspaceId.name, part],
{
prepare: dbUnsafePrepareOptions.upload
}
)
)
})
}
@ -1499,10 +1615,12 @@ abstract class PostgresAdapterBase implements DbAdapter {
const key = isDataField(domain, field) ? `data ->> '${field}'` : `"${field}"`
return ctx.with('groupBy', { domain }, async (ctx) => {
try {
return await this.mgr.read(ctx.id, async (connection) => {
const result = await connection.unsafe(
`SELECT DISTINCT ${key} as ${field}, Count(*) AS count FROM ${translateDomain(domain)} WHERE ${this.buildRawQuery(domain, query ?? {})} GROUP BY ${key}`
)
const vars = new ValuesVariables()
const finalSql = `SELECT DISTINCT ${key} as ${field}, Count(*) AS count FROM ${translateDomain(domain)} WHERE ${this.buildRawQuery(vars, domain, query ?? {})} GROUP BY ${key}`
return await this.mgr.retry(ctx.id, async (connection) => {
const result = await connection.unsafe(finalSql, vars.getValues(), {
prepare: dbUnsafePrepareOptions.find
})
return new Map(result.map((r) => [r[field.toLocaleLowerCase()], parseInt(r.count)]))
})
} catch (err) {
@ -1526,31 +1644,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
@withContext('insert')
async insert (ctx: MeasureContext, domain: string, docs: Doc[]): Promise<TxResult> {
const schemaFields = getSchemaAndFields(domain)
const fieldsWithData = [...schemaFields.fields, 'data']
const columns: string[] = ['workspaceId']
for (const field of fieldsWithData) {
columns.push(field)
}
while (docs.length > 0) {
const part = docs.splice(0, 500)
const values: DBDoc[] = []
for (let i = 0; i < part.length; i++) {
const doc = part[i]
if ((doc as any)['%hash%'] == null) {
;(doc as any)['%hash%'] = this.curHash()
}
const d = convertDoc(domain, doc, this.workspaceId.name, schemaFields)
values.push(d)
}
await this.mgr.write(ctx.id, async (client) => {
try {
await client`INSERT INTO ${client(translateDomain(domain))} ${client(values, columns)}`
} catch (err: any) {
console.error('inserting error', err)
}
})
}
await this.upload(ctx, domain as Domain, docs, false)
return {}
}
}
@ -1611,15 +1705,26 @@ class PostgresAdapter extends PostgresAdapterBase {
const domain = this.hierarchy.getDomain(tx.objectClass)
const converted = convertDoc(domain, doc, this.workspaceId.name, schemaFields)
const { extractedFields } = parseUpdate(tx.attributes as Partial<Doc>, schemaFields)
const columns = new Set<string>()
for (const key in extractedFields) {
columns.add(key)
const params = new ValuesVariables()
const wsId = params.add(this.workspaceId.name, '::uuid')
const oId = params.add(tx.objectId, '::text')
const updates: string[] = []
for (const key of new Set([...Object.keys(extractedFields), ...['modifiedOn', 'modifiedBy', '%hash%']])) {
const val = (doc as any)[key]
updates.push(`"${key}" = ${params.add(val, `::${schemaFields.schema[key].type}`)}`)
}
columns.add('modifiedBy')
columns.add('modifiedOn')
columns.add('data')
columns.add('%hash%')
await client`UPDATE ${client(translateDomain(domain))} SET ${client(converted, Array.from(columns))} WHERE "workspaceId" = ${this.workspaceId.name} AND _id = ${tx.objectId}`
updates.push(`data = ${params.add(converted.data, '::json')}`)
await client.unsafe(
`UPDATE ${translateDomain(domain)}
SET ${updates.join(', ')}
WHERE "workspaceId" = ${wsId} AND _id = ${oId}`,
params.getValues(),
{
prepare: dbUnsafePrepareOptions.update
}
)
})
})
return {}
@ -1696,7 +1801,7 @@ class PostgresAdapter extends PostgresAdapterBase {
const withoutOperator = byOperator.get(false)
const result: TxResult[] = []
const tdomain = translateDomain(domain)
for (const tx of withOperator ?? []) {
let doc: Doc | undefined
const ops: any = { '%hash%': this.curHash(), ...tx.operations }
@ -1710,18 +1815,31 @@ class PostgresAdapter extends PostgresAdapterBase {
TxProcessor.applyUpdate(doc, ops)
;(doc as any)['%hash%'] = this.curHash()
const converted = convertDoc(domain, doc, this.workspaceId.name, schemaFields)
const columns: string[] = []
const updates: string[] = []
const params = new ValuesVariables()
const { extractedFields, remainingData } = parseUpdate(ops, schemaFields)
for (const key in extractedFields) {
columns.push(key)
const wsId = params.add(this.workspaceId.name, '::uuid')
const oId = params.add(tx.objectId, '::text')
for (const key of new Set([...Object.keys(extractedFields), ...['modifiedOn', 'modifiedBy', '%hash%']])) {
const val = (doc as any)[key]
updates.push(`"${key}" = ${params.add(val, `::${schemaFields.schema[key].type}`)}`)
}
if (Object.keys(remainingData).length > 0) {
columns.push('data')
updates.push(`data = ${params.add(converted.data, '::json')}`)
}
if (!columns.includes('%hash%')) {
columns.push('%hash%')
}
await client`UPDATE ${client(translateDomain(domain))} SET ${client(converted, columns)} WHERE "workspaceId" = ${this.workspaceId.name} AND _id = ${tx.objectId}`
await client.unsafe(
`UPDATE ${tdomain}
SET ${updates.join(', ')}
WHERE "workspaceId" = ${wsId}
AND _id = ${oId}`,
params.getValues(),
{
prepare: dbUnsafePrepareOptions.update
}
)
})
if (tx.retrieve === true && doc !== undefined) {
return { object: doc }
@ -1754,7 +1872,7 @@ class PostgresAdapter extends PostgresAdapterBase {
for (const tx of txes) {
const fields: string[] = ['modifiedBy', 'modifiedOn', '%hash%']
const updates: string[] = ['"modifiedBy" = $1', '"modifiedOn" = $2', '"%hash%" = $3']
const updates: string[] = ['"modifiedBy" = $2', '"modifiedOn" = $3', '"%hash%" = $4']
const params: any[] = [tx.modifiedBy, tx.modifiedOn, null]
let paramsIndex = params.length
const { extractedFields, remainingData } = parseUpdate(tx.operations, schemaFields)
@ -1793,9 +1911,10 @@ class PostgresAdapter extends PostgresAdapterBase {
for (const upds of updates.values()) {
while (upds.length > 0) {
const part = upds.splice(0, 200)
let idx = 0
let idx = 1
const indexes: string[] = []
const data: any[] = []
data.push(this.workspaceId.name)
for (const op of part) {
indexes.push(
`($${++idx}::${schema._id.type ?? 'text'}, ${op.fields.map((it) => (it === 'data' ? `$${++idx}::jsonb` : `$${++idx}::${schema[it].type ?? 'text'}`)).join(',')})`
@ -1805,18 +1924,20 @@ class PostgresAdapter extends PostgresAdapterBase {
}
const op = `UPDATE ${tdomain} SET ${part[0].fields.map((it) => (it === 'data' ? 'data = COALESCE(data || update_data._data)' : `"${it}" = update_data."_${it}"`)).join(', ')}
FROM (values ${indexes.join(',')}) AS update_data(__id, ${part[0].fields.map((it) => `"_${it}"`).join(',')})
WHERE "workspaceId" = '${this.workspaceId.name}' AND "_id" = update_data.__id`
WHERE "workspaceId" = $1::uuid AND "_id" = update_data.__id`
await this.mgr.write(ctx.id, async (client) => {
await ctx.with('bulk-update', {}, () => {
return client.unsafe(op, data)
})
})
await this.mgr.retry(ctx.id, (client) =>
ctx.with('bulk-update', {}, () =>
client.unsafe(op, data, {
prepare: dbUnsafePrepareOptions.update
})
)
)
}
}
const toRetrieve = operations.filter((it) => it.retrieve)
if (toRetrieve.length > 0) {
await this.mgr.read(ctx.id, async (client) => {
await this.mgr.retry(ctx.id, async (client) => {
for (const op of toRetrieve) {
const object = await this.findDoc(_ctx, client, op.objectClass, op.objectId)
result.push({ object })
@ -1839,10 +1960,15 @@ class PostgresAdapter extends PostgresAdapterBase {
): Promise<Doc | undefined> {
const domain = this.hierarchy.getDomain(_class)
return ctx.with('find-doc', { _class }, async () => {
const res =
await client`SELECT * FROM ${client(translateDomain(domain))} WHERE "workspaceId" = ${this.workspaceId.name} AND _id = ${_id} ${
forUpdate ? client` FOR UPDATE` : client``
}`
const res = await client.unsafe(
`SELECT * FROM "${translateDomain(domain)}" WHERE "workspaceId" = $1::uuid AND _id = $2::text ${
forUpdate ? ' FOR UPDATE' : ''
}`,
[this.workspaceId.name, _id],
{
prepare: dbUnsafePrepareOptions.find
}
)
const dbDoc = res[0] as any
return dbDoc !== undefined ? parseDoc(dbDoc, getSchema(domain)) : undefined
})
@ -1886,11 +2012,13 @@ class PostgresTxAdapter extends PostgresAdapterBase implements TxAdapter {
}
async getModel (ctx: MeasureContext): Promise<Tx[]> {
const res: DBDoc[] = await this.mgr.read(
undefined,
(client) =>
client`SELECT * FROM ${client(translateDomain(DOMAIN_MODEL_TX))} WHERE "workspaceId" = ${this.workspaceId.name} ORDER BY _id ASC, "modifiedOn" ASC`
)
const res: DBDoc[] = await this.mgr.retry(undefined, (client) => {
return client.unsafe(
`SELECT * FROM "${translateDomain(DOMAIN_MODEL_TX)}" WHERE "workspaceId" = $1::uuid ORDER BY _id::text ASC, "modifiedOn"::bigint ASC`,
[this.workspaceId.name],
{ prepare: dbUnsafePrepareOptions.model }
)
})
const model = res.map((p) => parseDoc<Tx>(p, getSchema(DOMAIN_MODEL_TX)))
// We need to put all core.account.System transactions first
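
The query orders with explicit casts (_id::text, "modifiedOn"::bigint) so the sort keys are unambiguous. The re-ordering the comment refers to is not shown in this hunk; a hedged sketch of the stated intent:

```ts
// Not the verbatim upstream code — just the stated intent.
const systemTx = model.filter((tx) => tx.modifiedBy === core.account.System)
const userTx = model.filter((tx) => tx.modifiedBy !== core.account.System)
const ordered = [...systemTx, ...userTx]
```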

View File

@ -30,7 +30,7 @@ import core, {
} from '@hcengineering/core'
import { PlatformError, unknownStatus } from '@hcengineering/platform'
import { type DomainHelperOperations } from '@hcengineering/server-core'
import postgres from 'postgres'
import postgres, { type Options } from 'postgres'
import {
addSchema,
type DataType,
@ -83,37 +83,48 @@ export async function createTables (
return
}
const mapped = filtered.map((p) => translateDomain(p))
const inArr = mapped.map((it) => `'${it}'`).join(', ')
const tables = await ctx.with('load-table', {}, () =>
client.unsafe(`
client.unsafe(
`
SELECT table_name
FROM information_schema.tables
WHERE table_name IN (${inArr})
`)
FROM information_schema.tables
WHERE table_name = ANY( $1::text[] )
`,
[mapped]
)
)
const exists = new Set(tables.map((it) => it.table_name))
await retryTxn(client, async (client) => {
const domainsToLoad = mapped.filter((it) => exists.has(it))
if (domainsToLoad.length > 0) {
await ctx.with('load-schemas', {}, () => getTableSchema(client, domainsToLoad))
}
for (const domain of mapped) {
if (!exists.has(domain)) {
await ctx.with('create-table', {}, () => createTable(client, domain))
}
const domainsToLoad = mapped.filter((it) => exists.has(it))
if (domainsToLoad.length > 0) {
await ctx.with('load-schemas', {}, () => getTableSchema(client, domainsToLoad))
}
const domainsToCreate: string[] = []
for (const domain of mapped) {
if (!exists.has(domain)) {
domainsToCreate.push(domain)
} else {
loadedDomains.add(url + domain)
}
})
}
if (domainsToCreate.length > 0) {
await retryTxn(client, async (client) => {
for (const domain of domainsToCreate) {
await ctx.with('create-table', {}, () => createTable(client, domain))
loadedDomains.add(url + domain)
}
})
}
}
async function getTableSchema (client: postgres.Sql, domains: string[]): Promise<void> {
const res = await client.unsafe(
`SELECT column_name, data_type, is_nullable, table_name
`SELECT column_name::name, data_type::text, is_nullable::text, table_name::name
FROM information_schema.columns
WHERE table_name = ANY($1) and table_schema = 'public'
ORDER BY table_name, ordinal_position ASC;`,
WHERE table_name = ANY($1::text[]) and table_schema = 'public'::name
ORDER BY table_name::name, ordinal_position::int ASC;`,
[domains]
)
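
Both catalog queries now follow the same pattern: pass a JS array as a single bind parameter and compare with = ANY($1::text[]), instead of splicing a quoted IN list into the SQL. Sketch:

```ts
// postgres.js serializes a JS string[] into a Postgres text[] parameter.
const tables = await client.unsafe(
  'SELECT table_name FROM information_schema.tables WHERE table_name = ANY($1::text[])',
  [['tx', 'task']] // one parameter whose value is the whole array
)
```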
@ -266,6 +277,29 @@ export class ClientRef implements PostgresClientReference {
}
}
let dbExtraOptions: Partial<Options<any>> = {}
export function setDBExtraOptions (options: Partial<Options<any>>): void {
dbExtraOptions = options
}
export interface DbUnsafePrepareOptions {
upload: boolean
find: boolean
update: boolean
model: boolean
}
export let dbUnsafePrepareOptions: DbUnsafePrepareOptions = {
upload: true,
find: true,
update: true,
model: true
}
export function setDbUnsafePrepareOptions (options: DbUnsafePrepareOptions): void {
dbUnsafePrepareOptions = options
}
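
Prepared statements default to on for every operation class; deployments whose connection path cannot track named statements per client override this at startup. The Cloudflare transactor below turns everything off — the pooling/proxy motivation is an assumption:

```ts
setDbUnsafePrepareOptions({ upload: false, find: false, update: false, model: false })
```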
/**
* Initialize a workspace connection to DB
* @public
@ -285,6 +319,11 @@ export function getDBClient (connectionString: string, database?: string): Postg
transform: {
undefined: null
},
debug: false,
notice: false,
onnotice (notice) {},
onparameter (key, value) {},
...dbExtraOptions,
...extraOptions
})
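
Because dbExtraOptions is spread before extraOptions, per-call options still win over the deployment-wide overrides. Usage, as the cloud transactor does further down:

```ts
// Deployment-wide postgres.js options, merged into every getDBClient call.
setDBExtraOptions({
  ssl: false,
  connection: { application_name: 'cloud-transactor' }
})
```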
@ -374,7 +413,7 @@ export function inferType (val: any): string {
return '::boolean'
}
if (Array.isArray(val)) {
const type = inferType(val[0])
const type = inferType(val[0] ?? val[1])
if (type !== '') {
return type + '[]'
}
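
The `val[0] ?? val[1]` change lets array typing survive a leading null. Derived from the branches shown here (booleans map to '::boolean', arrays append '[]'):

```ts
inferType([null, true]) // previously inspected only val[0]; now '::boolean[]'
inferType([true]) // '::boolean[]' in both versions
```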

View File

@ -92,6 +92,8 @@ export function createServerPipeline (
adapterSecurity?: boolean
externalStorage: StorageAdapter
extraLogging?: boolean // If passed, will log every request/etc.
},
extensions?: Partial<DbConfiguration>
): PipelineFactory {

View File

@ -118,11 +118,14 @@ class TSessionManager implements SessionManager {
}
| undefined,
readonly accountsUrl: string,
readonly enableCompression: boolean
readonly enableCompression: boolean,
readonly doHandleTick: boolean = true
) {
this.checkInterval = setInterval(() => {
this.handleTick()
}, 1000 / ticksPerSecond)
if (this.doHandleTick) {
this.checkInterval = setInterval(() => {
this.handleTick()
}, 1000 / ticksPerSecond)
}
}
scheduleMaintenance (timeMinutes: number): void {
@ -1136,7 +1139,8 @@ export function createSessionManager (
}
| undefined,
accountsUrl: string,
enableCompression: boolean
enableCompression: boolean,
doHandleTick: boolean = true
): SessionManager {
return new TSessionManager(
ctx,
@ -1145,7 +1149,8 @@ export function createSessionManager (
brandingMap ?? null,
profiling,
accountsUrl,
enableCompression
enableCompression,
doHandleTick
)
}
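
With doHandleTick = false the manager creates no interval of its own and the host drives ticks instead; the Cloudflare Durable Object below passes false, presumably because a free-running setInterval does not fit its lifecycle. Construction sketch using names from this diff:

```ts
const sessionManager = createSessionManager(
  ctx,
  (token, workspace) => new ClientSession(token, workspace, false),
  loadBrandingMap(),
  { pingTimeout: 10000, reconnectTimeout: 3000 },
  undefined, // profiling
  accountsUrl,
  false, // enableCompression
  false // doHandleTick: the host schedules ticks itself
)
```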

View File

@ -22,7 +22,7 @@
"@hcengineering/platform-rig": "*"
},
"devDependencies": {
"wrangler": "^3.97.0",
"wrangler": "^3.103.1",
"@typescript-eslint/eslint-plugin": "^6.11.0",
"@typescript-eslint/parser": "^6.11.0",
"eslint-config-standard-with-typescript": "^40.0.0",

View File

@ -22,7 +22,7 @@
"@hcengineering/platform-rig": "^0.6.0",
"@cloudflare/workers-types": "^4.20241022.0",
"typescript": "^5.3.3",
"wrangler": "^3.97.0",
"wrangler": "^3.103.1",
"jest": "^29.7.0",
"prettier": "^3.1.0",
"ts-jest": "^29.1.1",

View File

@ -38,7 +38,7 @@
"prettier": "^3.1.0",
"ts-jest": "^29.1.1",
"typescript": "^5.3.3",
"wrangler": "^3.97.0",
"wrangler": "^3.103.1",
"esbuild": "^0.24.2",
"@types/snappyjs": "^0.7.1"
},

View File

@ -45,6 +45,7 @@ export default {
&copy; 2024 <a href="https://hulylabs.com">Huly Labs</a>`
)
)
.all('*', () => error(404))
return await router.fetch(request).catch(error)
}

View File

@ -0,0 +1,36 @@
//
// Copyright © 2025 Hardcore Engineering Inc.
//
import type { MeasureLogger, ParamsType } from '@hcengineering/core'
export class CloudFlareLogger implements MeasureLogger {
error (message: string, obj?: Record<string, any>): void {
// Check if obj has an error inside, so we can send it to Analytics
for (const v of Object.values(obj ?? {})) {
if (v instanceof Error) {
// Analytics.handleError(v)
}
}
console.error({ message, ...obj })
}
info (message: string, obj?: Record<string, any>): void {
console.info({ message, ...obj })
}
warn (message: string, obj?: Record<string, any>): void {
console.warn({ message, ...obj })
}
logOperation (operation: string, time: number, params: ParamsType): void {
console.info({ time, ...params, message: operation })
}
childLogger (name: string, params: Record<string, string>): MeasureLogger {
return this
}
async close (): Promise<void> {}
}
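
Each method funnels a single structured object through console.*, which Cloudflare Workers Logs captures field by field rather than as a flat string. Usage sketch:

```ts
const logger = new CloudFlareLogger()
logger.info('session opened', { workspace: 'ws-1' })
logger.logOperation('findAll', 12, { domain: 'task' })
// -> console.info({ time: 12, domain: 'task', message: 'findAll' })
```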

View File

@ -2,6 +2,8 @@
import {
generateId,
MeasureMetricsContext,
newMetrics,
type Class,
type Doc,
type DocumentQuery,
@ -27,12 +29,18 @@ import {
} from '@hcengineering/server-core'
import serverPlugin, { decodeToken, type Token } from '@hcengineering/server-token'
import { DurableObject } from 'cloudflare:workers'
import { compress } from 'snappyjs'
import { promisify } from 'util'
import { gzip } from 'zlib'
import { compress } from 'snappyjs'
// Approach useful only for a separate build, after the model-all bundle phase is executed.
import { createPostgreeDestroyAdapter, createPostgresAdapter, createPostgresTxAdapter } from '@hcengineering/postgres'
import {
createPostgreeDestroyAdapter,
createPostgresAdapter,
createPostgresTxAdapter,
setDBExtraOptions,
setDbUnsafePrepareOptions
} from '@hcengineering/postgres'
import {
createServerPipeline,
registerAdapterFactory,
@ -41,6 +49,7 @@ import {
registerStringLoaders,
registerTxAdapterFactory
} from '@hcengineering/server-pipeline'
import { CloudFlareLogger } from './logger'
import model from './model.json'
export const PREFERRED_SAVE_SIZE = 500
@ -62,6 +71,18 @@ export class Transactor extends DurableObject<Env> {
constructor (ctx: DurableObjectState, env: Env) {
super(ctx, env)
setDBExtraOptions({
ssl: false,
connection: {
application_name: 'cloud-transactor'
}
})
setDbUnsafePrepareOptions({
upload: false,
find: false,
update: false,
model: false
})
registerTxAdapterFactory('postgresql', createPostgresTxAdapter, true)
registerAdapterFactory('postgresql', createPostgresAdapter, true)
registerDestroyFactory('postgresql', createPostgreeDestroyAdapter, true)
@ -74,13 +95,15 @@ export class Transactor extends DurableObject<Env> {
this.measureCtx = initStatisticsContext('cloud-transactor', {
statsUrl: this.env.STATS_URL ?? 'http://127.0.0.1:4900',
serviceName: () => 'cloud-transactor: ' + this.workspace
serviceName: () => 'cloud-transactor: ' + this.workspace,
factory: () => new MeasureMetricsContext('transactor', {}, {}, newMetrics(), new CloudFlareLogger())
})
setMetadata(serverPlugin.metadata.Secret, env.SERVER_SECRET ?? 'secret')
console.log({ message: 'Connecting DB', mode: env.DB_URL !== '' ? 'Direct ' : 'Hyperdrive' })
console.log({ message: 'use stats: ' + (this.env.STATS_URL ?? 'http://127.0.0.1:4900') })
console.log({ message: 'use stats', url: this.env.STATS_URL })
console.log({ message: 'use fulltext', url: this.env.FULLTEXT_URL })
// TODO:
const storage = createDummyStorageAdapter()
@ -88,34 +111,40 @@ export class Transactor extends DurableObject<Env> {
this.pipelineFactory = async (ctx, ws, upgrade, broadcast, branding) => {
const pipeline = createServerPipeline(
this.measureCtx,
env.DB_URL !== '' && env.DB_URL !== undefined ? env.DB_URL : env.HYPERDRIVE.connectionString,
env.DB_MODE === 'direct' ? env.DB_URL ?? '' : env.HYPERDRIVE.connectionString,
model,
{
externalStorage: storage,
adapterSecurity: false,
disableTriggers: false,
fulltextUrl: env.FULLTEXT_URL // TODO: Pass fulltext service URI.
fulltextUrl: env.FULLTEXT_URL,
extraLogging: true
}
)
return await pipeline(ctx, ws, upgrade, broadcast, branding)
}
void this.ctx.blockConcurrencyWhile(async () => {
setMetadata(serverClient.metadata.Endpoint, env.ACCOUNTS_URL)
void this.ctx
.blockConcurrencyWhile(async () => {
setMetadata(serverClient.metadata.Endpoint, env.ACCOUNTS_URL)
this.sessionManager = createSessionManager(
this.measureCtx,
(token: Token, workspace) => new ClientSession(token, workspace, false),
loadBrandingMap(), // TODO: Support branding map
{
pingTimeout: 10000,
reconnectTimeout: 3000
},
undefined,
this.accountsUrl,
env.ENABLE_COMPRESSION === 'true'
)
})
this.sessionManager = createSessionManager(
this.measureCtx,
(token: Token, workspace) => new ClientSession(token, workspace, false),
loadBrandingMap(), // TODO: Support branding map
{
pingTimeout: 10000,
reconnectTimeout: 3000
},
undefined,
this.accountsUrl,
env.ENABLE_COMPRESSION === 'true',
false
)
})
.catch((err) => {
console.error('Failed to init transactor', err)
})
}
async fetch (request: Request): Promise<Response> {

View File

@ -16,4 +16,6 @@ interface Env {
ENABLE_COMPRESSION: string | undefined
FULLTEXT_URL: string | undefined
DB_MODE: 'hyperdrive' | 'direct' | undefined
}

View File

@ -7,6 +7,7 @@ keep_vars = true
[observability.logs]
enabled = true
head_sampling_rate = 1 # optional. default = 1.
# Automatically place your workloads in an optimal location to minimize latency.
# If you are running back-end logic in a Worker, running it closer to your back-end infrastructure
@ -23,6 +24,7 @@ enabled = true
[vars]
# ACCOUNTS_URL = "http://127.0.0.1:3000"
# SERVER_SECRET = "secret"
DB_MODE='hyperdrive'
# Bind the Workers AI model catalog. Run machine learning models, powered by serverless GPUs, on Cloudflare's global network
# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#workers-ai