Merge remote-tracking branch 'origin/develop' into staging

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Author: Andrey Sobolev
Date:   2024-10-23 22:09:25 +07:00
Commit: 55a0d79a1c
GPG Key ID: BD80F68D68D8F7F2

94 changed files with 693 additions and 386 deletions


@@ -190,7 +190,7 @@ dependencies:
       version: file:projects/core.tgz(@types/node@20.11.19)(esbuild@0.20.1)(ts-node@10.9.2)
   '@rush-temp/datalake':
     specifier: file:./projects/datalake.tgz
-    version: file:projects/datalake.tgz(esbuild@0.20.1)(ts-node@10.9.2)
+    version: file:projects/datalake.tgz(esbuild@0.20.1)
   '@rush-temp/desktop':
     specifier: file:./projects/desktop.tgz
     version: file:projects/desktop.tgz(bufferutil@4.0.8)(sass@1.71.1)(utf-8-validate@6.0.4)
@@ -27080,13 +27080,12 @@ packages:
       - ts-node
     dev: false

-  file:projects/datalake.tgz(esbuild@0.20.1)(ts-node@10.9.2):
-    resolution: {integrity: sha512-pqgfJAfjDTa3AWRK263xljvkd1GLinDFrjTGW7res8krRskMMJ3K6gj3kfnLjyKmWeAesJQ5CSnFybPnPSJq/Q==, tarball: file:projects/datalake.tgz}
+  file:projects/datalake.tgz(esbuild@0.20.1):
+    resolution: {integrity: sha512-UX1RJWMtrQY5HWrFKnwi2vrRYfR8ZSRo2PtLn04ozWueiiLS3Q61UauAUfPDRtO0K5cJgecH7+gX750dx8oUhQ==, tarball: file:projects/datalake.tgz}
     id: file:projects/datalake.tgz
     name: '@rush-temp/datalake'
     version: 0.0.0
     dependencies:
-      '@aws-sdk/client-s3': 3.577.0
       '@types/jest': 29.5.12
       '@types/node': 20.11.19
       '@types/node-fetch': 2.6.11
@@ -27102,17 +27101,19 @@ packages:
       node-fetch: 2.7.0
       prettier: 3.2.5
       ts-jest: 29.1.2(esbuild@0.20.1)(jest@29.7.0)(typescript@5.3.3)
-      ts-node: 10.9.2(@types/node@20.11.19)(typescript@5.3.3)
       typescript: 5.3.3
     transitivePeerDependencies:
       - '@babel/core'
       - '@jest/types'
+      - '@swc/core'
+      - '@swc/wasm'
       - babel-jest
       - babel-plugin-macros
       - encoding
       - esbuild
       - node-notifier
       - supports-color
+      - ts-node
     dev: false

   file:projects/desktop-1.tgz(webpack@5.90.3):


@@ -18,7 +18,14 @@ import {
   type MeasureMetricsContext
 } from '@hcengineering/core'
 import { getMongoClient, getWorkspaceMongoDB } from '@hcengineering/mongo'
-import { convertDoc, createTable, getDBClient, retryTxn, translateDomain } from '@hcengineering/postgres'
+import {
+  convertDoc,
+  createTable,
+  getDBClient,
+  getDocFieldsByDomains,
+  retryTxn,
+  translateDomain
+} from '@hcengineering/postgres'
 import { getTransactorEndpoint } from '@hcengineering/server-client'
 import { generateToken } from '@hcengineering/server-token'
 import { connect } from '@hcengineering/server-tool'
@@ -54,10 +61,6 @@
   client.close()
 }

-function escapeBackticks (str: string): string {
-  return str.replaceAll("'", "''")
-}
-
 async function moveWorkspace (
   accountDb: AccountDB,
   mongo: MongoClient,
@@ -85,6 +88,13 @@ async function moveWorkspace (
     const currentIds = new Set(current.rows.map((r) => r._id))
     console.log('move domain', domain)
     const docs: Doc[] = []
+    const fields = getDocFieldsByDomains(domain)
+    const filedsWithData = [...fields, 'data']
+    const insertFields: string[] = []
+    for (const field of filedsWithData) {
+      insertFields.push(`"${field}"`)
+    }
+    const insertStr = insertFields.join(', ')
     while (true) {
       while (docs.length < 50000) {
         const doc = (await cursor.next()) as Doc | null
@@ -95,18 +105,29 @@
       if (docs.length === 0) break
       while (docs.length > 0) {
         const part = docs.splice(0, 500)
-        const vals = part
-          .map((doc) => {
-            const d = convertDoc(doc, ws.workspace)
-            return `('${d._id}', '${d.workspaceId}', '${d._class}', '${d.createdBy ?? d.modifiedBy}', '${d.modifiedBy}', ${d.modifiedOn}, ${d.createdOn ?? d.modifiedOn}, '${d.space}', ${
-              d.attachedTo != null ? `'${d.attachedTo}'` : 'NULL'
-            }, '${escapeBackticks(JSON.stringify(d.data))}')`
-          })
-          .join(', ')
+        const values: any[] = []
+        const vars: string[] = []
+        let index = 1
+        for (let i = 0; i < part.length; i++) {
+          const doc = part[i]
+          const variables: string[] = []
+          const d = convertDoc(domain, doc, ws.workspace)
+          values.push(d.workspaceId)
+          variables.push(`$${index++}`)
+          for (const field of fields) {
+            values.push(d[field])
+            variables.push(`$${index++}`)
+          }
+          values.push(d.data)
+          variables.push(`$${index++}`)
+          vars.push(`(${variables.join(', ')})`)
+        }
+        const vals = vars.join(',')
         try {
           await retryTxn(pgClient, async (client) => {
             await client.query(
-              `INSERT INTO ${translateDomain(domain)} (_id, "workspaceId", _class, "createdBy", "modifiedBy", "modifiedOn", "createdOn", space, "attachedTo", data) VALUES ${vals}`
+              `INSERT INTO ${translateDomain(domain)} ("workspaceId", ${insertStr}) VALUES ${vals}`,
+              values
             )
           })
         } catch (err) {
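
Note: the rewritten insert path above builds one flat parameter array plus a matching list of positional $n placeholders per row, instead of interpolating escaped values into the SQL string. A minimal standalone sketch of that pattern (the table, field names and two-row batch here are illustrative, not taken from the migration):

const fields = ['_id', '_class', 'space']
const rows: any[] = [
  { _id: 'd1', _class: 'c1', space: 's1', data: { title: 'a' } },
  { _id: 'd2', _class: 'c1', space: 's1', data: { title: 'b' } }
]
const values: any[] = []
const tuples: string[] = []
let index = 1
for (const row of rows) {
  const vars: string[] = [`$${index++}`]
  values.push('workspace-1') // "workspaceId" leads every tuple
  for (const field of fields) {
    values.push(row[field])
    vars.push(`$${index++}`)
  }
  values.push(row.data)
  vars.push(`$${index++}`)
  tuples.push(`(${vars.join(', ')})`)
}
// tuples => ['($1, $2, $3, $4, $5)', '($6, $7, $8, $9, $10)']
// query:  INSERT INTO task ("workspaceId", "_id", "_class", "space", "data") VALUES ($1, ...), ($6, ...)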


@@ -371,7 +371,7 @@ export function devTool (
       lastProcessingTime: Date.now() + 1000 * 60
     })

-    await createWorkspace(measureCtx, version, brandingObj, wsInfo, txes, migrateOperations)
+    await createWorkspace(measureCtx, version, brandingObj, wsInfo, txes, migrateOperations, undefined, true)

     await updateWorkspace(db, wsInfo, {
       mode: 'active',
@@ -1717,7 +1717,7 @@ export function devTool (
       lastProcessingTime: Date.now() + 1000 * 60
     })

-    await createWorkspace(measureCtx, version, null, wsInfo, txes, migrateOperations)
+    await createWorkspace(measureCtx, version, null, wsInfo, txes, migrateOperations, undefined, true)

     await updateWorkspace(db, wsInfo, {
       mode: 'active',


@@ -133,18 +133,6 @@ describe('memdb', () => {
     })
     const objClass = (await model.findAll(core.class.Class, { _id: core.class.Obj }))[0] as any
     expect(objClass['test:mixin:TestMixin'].arr).toEqual(expect.arrayContaining(['hello']))
-
-    await ops.updateDoc(test.mixin.TestMixin, core.space.Model, core.class.Obj as unknown as Ref<TestMixin>, {
-      $pushMixin: {
-        $mixin: test.mixin.TestMixin,
-        values: {
-          arr: 'there'
-        }
-      }
-    })
-    const objClass2 = (await model.findAll(core.class.Class, { _id: core.class.Obj }))[0] as any
-    expect(objClass2['test:mixin:TestMixin'].arr).toEqual(expect.arrayContaining(['hello', 'there']))
   })

   it('should allow delete', async () => {


@@ -116,37 +116,6 @@ function $update (document: Doc, keyval: Record<string, PropertyType>): void {
   }
 }

-function $move (document: Doc, keyval: Record<string, PropertyType>): void {
-  const doc = document as any
-  for (const key in keyval) {
-    if (doc[key] === undefined) {
-      doc[key] = []
-    }
-    const arr = doc[key] as Array<any>
-    const desc = keyval[key]
-    doc[key] = (arr ?? []).filter((val) => val !== desc.$value)
-    doc[key].splice(desc.$position, 0, desc.$value)
-  }
-}
-
-function $pushMixin (document: Doc, options: any): void {
-  const doc = document as any
-  const mixinId = options.$mixin
-  if (mixinId === undefined) {
-    throw new Error('$mixin must be specified for $push_mixin operation')
-  }
-  const mixin = doc[mixinId]
-  const keyval = options.values
-  for (const key in keyval) {
-    const arr = mixin[key]
-    if (arr == null) {
-      mixin[key] = [keyval[key]]
-    } else {
-      arr.push(keyval[key])
-    }
-  }
-}
-
 function $inc (document: Doc, keyval: Record<string, number>): void {
   const doc = document as unknown as Record<string, number | undefined>
   for (const key in keyval) {
@@ -180,8 +149,6 @@ const operators: Record<string, _OperatorFunc> = {
   $push,
   $pull,
   $update,
-  $move,
-  $pushMixin,
   $inc,
   $unset,
   $rename


@@ -247,7 +247,6 @@ export type OmitNever<T extends object> = Omit<T, KeysByType<T, never>>
 export interface PushOptions<T extends object> {
   $push?: Partial<OmitNever<ArrayAsElementPosition<Required<T>>>>
   $pull?: Partial<OmitNever<ArrayAsElement<Required<T>>>>
-  $move?: Partial<OmitNever<ArrayMoveDescriptor<Required<T>>>>
 }

 /**
@@ -269,16 +268,6 @@ export interface SetEmbeddedOptions<T extends object> {
   $update?: Partial<OmitNever<ArrayAsElementUpdate<Required<T>>>>
 }

-/**
- * @public
- */
-export interface PushMixinOptions<D extends Doc> {
-  $pushMixin?: {
-    $mixin: Ref<Mixin<D>>
-    values: Partial<OmitNever<ArrayAsElement<D>>>
-  }
-}
-
 /**
  * @public
  */
@@ -299,7 +288,6 @@ export interface SpaceUpdate {
 export type DocumentUpdate<T extends Doc> = Partial<Data<T>> &
   PushOptions<T> &
   SetEmbeddedOptions<T> &
-  PushMixinOptions<T> &
   IncOptions<T> &
   SpaceUpdate


@@ -29,7 +29,7 @@ import { ModelLogger } from './utils'
  * @public
  */
 export type MigrateUpdate<T extends Doc> = Partial<T> &
-  Omit<PushOptions<T>, '$move'> &
+  PushOptions<T> &
   IncOptions<T> &
   UnsetOptions &
   Record<string, any>


@@ -117,7 +117,7 @@ async function createPersonSpace (
   person: Ref<Person>,
   control: TriggerControl
 ): Promise<TxCUD<PersonSpace>[]> {
-  const personSpace = (await control.findAll(control.ctx, contact.class.PersonSpace, { person }, { limit: 1 })).shift()
+  const personSpace = (await control.findAll(control.ctx, contact.class.PersonSpace, { person }, { limit: 1 }))[0]
   if (personSpace !== undefined) {
     const toAdd = account.filter((it) => !personSpace.members.includes(it))
     if (toAdd.length === 0) return []


@@ -1717,7 +1717,7 @@ async function updateCollaborators (
   if (hierarchy.classHierarchyMixin(objectClass, activity.mixin.ActivityDoc) === undefined) return res

-  const contexts = await control.findAll(control.ctx, notification.class.DocNotifyContext, { attachedTo: objectId })
+  const contexts = await control.findAll(control.ctx, notification.class.DocNotifyContext, { objectId })
   const addedInfo = await getUsersInfo(ctx, toAdd as Ref<PersonAccount>[], control)

   for (const addedUser of addedInfo.values()) {


@@ -118,6 +118,7 @@ class AdapterStorage implements BackupStorage {
  */
 export async function createFileBackupStorage (fileName: string): Promise<BackupStorage> {
   if (!existsSync(fileName)) {
+    console.log(__dirname)
     await mkdir(fileName, { recursive: true })
   }
   return new FileStorage(fileName)


@@ -32,7 +32,8 @@
     "jest": "^29.7.0",
     "ts-jest": "^29.1.1",
     "@types/jest": "^29.5.5",
-    "@types/node-fetch": "~2.6.2"
+    "@types/node-fetch": "~2.6.2",
+    "ts-node": "^10.8.0"
   },
   "dependencies": {
     "@hcengineering/core": "^0.6.32",


@@ -49,11 +49,7 @@ type BlobUploadResult = BlobUploadSuccess | BlobUploadError

 /** @public */
 export class Client {
-  private readonly endpoint: string
-
-  constructor (host: string, port?: number) {
-    this.endpoint = port !== undefined ? `${host}:${port}` : host
-  }
+  constructor (private readonly endpoint: string) {}

   getObjectUrl (ctx: MeasureContext, workspace: WorkspaceId, objectName: string): string {
     const path = `/blob/${workspace.name}/${encodeURIComponent(objectName)}`
@@ -62,7 +58,14 @@ export class Client {
   async getObject (ctx: MeasureContext, workspace: WorkspaceId, objectName: string): Promise<Readable> {
     const url = this.getObjectUrl(ctx, workspace, objectName)
-    const response = await fetchSafe(ctx, url)
+
+    let response
+    try {
+      response = await fetchSafe(ctx, url)
+    } catch (err) {
+      console.error('failed to get object', { workspace, objectName, err })
+      throw err
+    }

     if (response.body == null) {
       ctx.error('bad datalake response', { objectName })
@@ -81,10 +84,16 @@
   ): Promise<Readable> {
     const url = this.getObjectUrl(ctx, workspace, objectName)
     const headers = {
-      Range: `bytes=${offset}-${length ?? ''}`
+      Range: length !== undefined ? `bytes=${offset}-${offset + length - 1}` : `bytes=${offset}`
     }

-    const response = await fetchSafe(ctx, url, { headers })
+    let response
+    try {
+      response = await fetchSafe(ctx, url, { headers })
+    } catch (err) {
+      console.error('failed to get partial object', { workspace, objectName, err })
+      throw err
+    }

     if (response.body == null) {
       ctx.error('bad datalake response', { objectName })
@@ -101,7 +110,13 @@
   ): Promise<StatObjectOutput | undefined> {
     const url = this.getObjectUrl(ctx, workspace, objectName)

-    const response = await fetchSafe(ctx, url, { method: 'HEAD' })
+    let response: Response
+    try {
+      response = await fetchSafe(ctx, url, { method: 'HEAD' })
+    } catch (err) {
+      console.error('failed to stat object', { workspace, objectName, err })
+      throw err
+    }

     const headers = response.headers
     const lastModified = Date.parse(headers.get('Last-Modified') ?? '')
@@ -117,7 +132,12 @@
   async deleteObject (ctx: MeasureContext, workspace: WorkspaceId, objectName: string): Promise<void> {
     const url = this.getObjectUrl(ctx, workspace, objectName)

-    await fetchSafe(ctx, url, { method: 'DELETE' })
+    try {
+      await fetchSafe(ctx, url, { method: 'DELETE' })
+    } catch (err) {
+      console.error('failed to delete object', { workspace, objectName, err })
+      throw err
+    }
   }

   async putObject (
@@ -128,14 +148,30 @@
     metadata: ObjectMetadata,
     size?: number
   ): Promise<void> {
-    if (size === undefined || size < 64 * 1024 * 1024) {
-      await ctx.with('direct-upload', {}, async (ctx) => {
-        await this.uploadWithFormData(ctx, workspace, objectName, stream, metadata)
-      })
-    } else {
-      await ctx.with('signed-url-upload', {}, async (ctx) => {
-        await this.uploadWithSignedURL(ctx, workspace, objectName, stream, metadata)
-      })
+    if (size === undefined) {
+      if (Buffer.isBuffer(stream)) {
+        size = stream.length
+      } else if (typeof stream === 'string') {
+        size = Buffer.byteLength(stream)
+      } else {
+        // TODO: Implement size calculation for Readable streams
+        ctx.warn('unknown object size', { workspace, objectName })
+      }
+    }
+
+    try {
+      if (size === undefined || size < 64 * 1024 * 1024) {
+        await ctx.with('direct-upload', {}, async (ctx) => {
+          await this.uploadWithFormData(ctx, workspace, objectName, stream, metadata)
+        })
+      } else {
+        await ctx.with('signed-url-upload', {}, async (ctx) => {
+          await this.uploadWithSignedURL(ctx, workspace, objectName, stream, metadata)
+        })
+      }
+    } catch (err) {
+      console.error('failed to put object', { workspace, objectName, err })
+      throw err
     }
   }

@@ -164,14 +200,12 @@
     const result = (await response.json()) as BlobUploadResult[]
     if (result.length !== 1) {
-      ctx.error('bad datalake response', { objectName, result })
-      throw new Error('Bad datalake response')
+      throw new Error('Bad datalake response: ' + result.toString())
     }

     const uploadResult = result[0]
     if ('error' in uploadResult) {
-      ctx.error('error during blob upload', { objectName, error: uploadResult.error })
       throw new Error('Upload failed: ' + uploadResult.error)
     }
   }
@@ -195,26 +229,43 @@
           'x-amz-meta-last-modified': metadata.lastModified.toString()
         }
       })
-      await this.signObjectComplete(ctx, workspace, objectName)
-    } catch {
+    } catch (err) {
       await this.signObjectDelete(ctx, workspace, objectName)
+      throw new Error('Failed to upload via signed URL')
     }
+
+    await this.signObjectComplete(ctx, workspace, objectName)
   }

   private async signObjectSign (ctx: MeasureContext, workspace: WorkspaceId, objectName: string): Promise<string> {
-    const url = this.getSignObjectUrl(workspace, objectName)
-    const response = await fetchSafe(ctx, url, { method: 'POST' })
-    return await response.text()
+    try {
+      const url = this.getSignObjectUrl(workspace, objectName)
+      const response = await fetchSafe(ctx, url, { method: 'POST' })
+      return await response.text()
+    } catch (err: any) {
+      ctx.error('failed to sign object', { workspace, objectName, err })
+      throw new Error('Failed to sign URL')
+    }
   }

   private async signObjectComplete (ctx: MeasureContext, workspace: WorkspaceId, objectName: string): Promise<void> {
-    const url = this.getSignObjectUrl(workspace, objectName)
-    await fetchSafe(ctx, url, { method: 'PUT' })
+    try {
+      const url = this.getSignObjectUrl(workspace, objectName)
+      await fetchSafe(ctx, url, { method: 'PUT' })
+    } catch (err: any) {
+      ctx.error('failed to complete signed url upload', { workspace, objectName, err })
+      throw new Error('Failed to complete signed URL upload')
+    }
   }

   private async signObjectDelete (ctx: MeasureContext, workspace: WorkspaceId, objectName: string): Promise<void> {
-    const url = this.getSignObjectUrl(workspace, objectName)
-    await fetchSafe(ctx, url, { method: 'DELETE' })
+    try {
+      const url = this.getSignObjectUrl(workspace, objectName)
+      await fetchSafe(ctx, url, { method: 'DELETE' })
+    } catch (err: any) {
+      ctx.error('failed to abort signed url upload', { workspace, objectName, err })
+      throw new Error('Failed to abort signed URL upload')
+    }
   }

   private getSignObjectUrl (workspace: WorkspaceId, objectName: string): string {
@@ -228,12 +279,13 @@ async function fetchSafe (ctx: MeasureContext, url: string, init?: RequestInit):
   try {
     response = await fetch(url, init)
   } catch (err: any) {
-    ctx.error('network error', { error: err })
+    ctx.error('network error', { err })
     throw new Error(`Network error ${err}`)
   }

   if (!response.ok) {
-    throw new Error(response.status === 404 ? 'Not Found' : 'HTTP error ' + response.status)
+    const text = await response.text()
+    throw new Error(response.status === 404 ? 'Not Found' : 'HTTP error ' + response.status + ': ' + text)
   }

   return response

@@ -37,7 +37,7 @@ export class DatalakeService implements StorageAdapter {
   static config = 'datalake'
   client: Client
   constructor (readonly opt: DatalakeConfig) {
-    this.client = new Client(opt.endpoint, opt.port)
+    this.client = new Client(opt.endpoint)
   }

   async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {}
@@ -167,23 +167,15 @@ export class DatalakeService implements StorageAdapter {
 }

 export function processConfigFromEnv (storageConfig: StorageConfiguration): string | undefined {
-  let endpoint = process.env.DATALAKE_ENDPOINT
+  const endpoint = process.env.DATALAKE_ENDPOINT
   if (endpoint === undefined) {
     return 'DATALAKE_ENDPOINT'
   }

-  let port = 80
-  const sp = endpoint.split(':')
-  if (sp.length > 1) {
-    endpoint = sp[0]
-    port = parseInt(sp[1])
-  }
-
   const config: DatalakeConfig = {
     kind: 'datalake',
     name: 'datalake',
-    endpoint,
-    port
+    endpoint
   }
   storageConfig.storages.push(config)
   storageConfig.default = 'datalake'


@ -0,0 +1,102 @@
import { MeasureMetricsContext, generateId } from '@hcengineering/core'
import type { StorageConfiguration } from '@hcengineering/server-core'
import { DatalakeService, processConfigFromEnv, type DatalakeConfig } from '.'
const MB = 1024 * 1024
const config: StorageConfiguration = { default: 'minio', storages: [] }
const minioConfigVar = processConfigFromEnv(config)
if (minioConfigVar !== undefined || config.storages[0] === undefined) {
console.error('No Datalake config env is configured:' + minioConfigVar)
it.skip('No Datalake config env is configured', async () => {})
process.exit(1)
}
const toolCtx = new MeasureMetricsContext('test', {})
const storageService = new DatalakeService({ ...(config.storages[0] as DatalakeConfig) })
async function doTest (): Promise<void> {
const genWorkspaceId1 = generateId()
const ws1 = { name: genWorkspaceId1 }
await storageService.make(toolCtx, ws1)
/// /////// Uploads
console.log('upload 1mb test')
let st1 = Date.now()
const sz = 10
const stream = Buffer.alloc(sz * 1024 * 1024)
for (let i = 0; i < 10; i++) {
// We need 1Mb random file to check upload speed.
const st = Date.now()
await storageService.put(toolCtx, ws1, `testObject.${i}`, stream, 'application/octet-stream', stream.length)
console.log('upload time', Date.now() - st)
}
let now = Date.now()
console.log(`upload performance: ${Math.round((sz * 10 * 1000 * 100) / (now - st1)) / 100} mb per second`)
/// // Downloads 1
console.log('download 1mb test')
st1 = Date.now()
for (let i = 0; i < 10; i++) {
// We need 1Mb random file to check upload speed.
const st = Date.now()
await storageService.read(toolCtx, ws1, `testObject.${i}`)
console.log('download time', Date.now() - st)
}
now = Date.now()
console.log(`download performance: ${Math.round((sz * 10 * 1000 * 100) / (now - st1)) / 100} mb per second`)
/// Downloads 2
st1 = Date.now()
for (let i = 0; i < 10; i++) {
// We need 1Mb random file to check upload speed.
const st = Date.now()
const readable = await storageService.get(toolCtx, ws1, `testObject.${i}`)
const chunks: Buffer[] = []
readable.on('data', (chunk) => {
chunks.push(chunk)
})
await new Promise<void>((resolve) => {
readable.on('end', () => {
resolve()
readable.destroy()
})
})
console.log('download time 2', Date.now() - st)
}
now = Date.now()
console.log(`download performance: ${Math.round((sz * 10 * 1000 * 100) / (now - st1)) / 100} mb per second`)
/// Downloads 3
console.log('download partial test')
st1 = Date.now()
for (let i = 0; i < 10; i++) {
// We need 1Mb random file to check upload speed.
const st = Date.now()
for (let i = 0; i < sz; i++) {
const readable = await storageService.partial(toolCtx, ws1, `testObject.${i}`, i * MB, MB)
const chunks: Buffer[] = []
readable.on('data', (chunk) => {
chunks.push(chunk)
})
await new Promise<void>((resolve) => {
readable.on('end', () => {
resolve()
readable.destroy()
})
})
}
console.log('download time 2', Date.now() - st)
}
now = Date.now()
console.log(`download performance: ${Math.round((sz * 10 * 1000 * 100) / (now - st1)) / 100} mb per second`)
}
void doTest().catch((err) => {
console.error(err)
})
console.log('done')


@@ -1407,23 +1407,6 @@ class MongoAdapter extends MongoAdapterBase {
       modifiedOn: tx.modifiedOn
     }
     if (isOperator(tx.attributes)) {
-      const operator = Object.keys(tx.attributes)[0]
-      if (operator === '$move') {
-        const keyval = (tx.attributes as any).$move
-        const arr = tx.mixin + '.' + Object.keys(keyval)[0]
-        const desc = keyval[arr]
-        const ops: any = [
-          { updateOne: { filter, update: { $pull: { [arr]: desc.$value } } } },
-          {
-            updateOne: {
-              filter,
-              update: { $set: modifyOp, $push: { [arr]: { $each: [desc.$value], $position: desc.$position } } }
-            }
-          }
-        ]
-        bulk.bulkOperations.push(...ops)
-        return
-      }
       const update = { ...this.translateMixinAttrs(tx.mixin, tx.attributes), $set: { ...modifyOp } }

       bulk.bulkOperations.push({
@@ -1475,46 +1458,7 @@
   protected txUpdateDoc (bulk: OperationBulk, tx: TxUpdateDoc<Doc>): void {
     if (isOperator(tx.operations)) {
       const operator = Object.keys(tx.operations)[0]
-      if (operator === '$move') {
-        const keyval = (tx.operations as any).$move
-        const arr = Object.keys(keyval)[0]
-        const desc = keyval[arr]
-        const ops: any = [
-          {
-            updateOne: {
-              filter: { _id: tx.objectId },
-              update: {
-                $set: {
-                  '%hash%': null
-                },
-                $pull: {
-                  [arr]: desc.$value
-                }
-              }
-            }
-          },
-          {
-            updateOne: {
-              filter: { _id: tx.objectId },
-              update: {
-                $set: {
-                  modifiedBy: tx.modifiedBy,
-                  modifiedOn: tx.modifiedOn,
-                  '%hash%': null
-                },
-                $push: {
-                  [arr]: {
-                    $each: [desc.$value],
-                    $position: desc.$position
-                  }
-                }
-              }
-            }
-          }
-        ]
-        bulk.bulkOperations.push(...ops)
-      } else if (operator === '$update') {
+      if (operator === '$update') {
         const keyval = (tx.operations as any).$update
         const arr = Object.keys(keyval)[0]
         const desc = keyval[arr] as QueryUpdate<any>


@@ -14,4 +14,4 @@
 //
 export * from './storage'
-export { getDBClient, convertDoc, createTable, retryTxn, translateDomain } from './utils'
+export { getDBClient, convertDoc, createTable, retryTxn, translateDomain, getDocFieldsByDomains } from './utils'


@ -0,0 +1,36 @@
import { DOMAIN_SPACE } from '@hcengineering/core'
type DataType = 'bigint' | 'bool' | 'text' | 'text[]'
type Schema = Record<string, [DataType, boolean]>
export const defaultSchema: Schema = {
_id: ['text', true],
_class: ['text', true],
space: ['text', true],
modifiedBy: ['text', true],
createdBy: ['text', false],
modifiedOn: ['bigint', true],
createdOn: ['bigint', false],
attachedTo: ['text', false]
}
export const spaceSchema: Schema = {
_id: ['text', true],
_class: ['text', true],
space: ['text', true],
modifiedBy: ['text', true],
createdBy: ['text', false],
modifiedOn: ['bigint', true],
createdOn: ['bigint', false],
private: ['bool', true],
members: ['text[]', true]
}
export const domainSchemas: Record<string, Schema> = {
[DOMAIN_SPACE]: spaceSchema
}
export function getSchema (domain: string): Schema {
return domainSchemas[domain] ?? defaultSchema
}
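
Note: getDocFieldsByDomains is re-exported above but its implementation is not part of this diff. Given the schema shape in this new file (and assuming it is the schema module those utils import), a plausible reading is that the helper simply returns the dedicated column names for a domain, with everything else folded into the jsonb "data" column. A sketch, not the actual source:

import { getSchema } from './schema'

// Sketch only: the real helper lives in the postgres package's utils.
function getDocFieldsByDomains (domain: string): string[] {
  // e.g. for the default schema: _id, _class, space, modifiedBy, createdBy, modifiedOn, createdOn, attachedTo
  return Object.keys(getSchema(domain))
}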


@@ -66,33 +66,41 @@ import {
 } from '@hcengineering/server-core'
 import { createHash } from 'crypto'
 import { type Pool, type PoolClient } from 'pg'
+import { type ValueType } from './types'
 import {
   convertDoc,
   createTable,
   DBCollectionHelper,
-  docFields,
   escapeBackticks,
   getDBClient,
-  getUpdateValue,
+  getDocFieldsByDomains,
   isDataField,
   isOwner,
   type JoinProps,
+  Mutex,
   parseDoc,
   parseDocWithProjection,
+  parseUpdate,
   type PostgresClientReference,
-  retryTxn,
   translateDomain
 } from './utils'

 abstract class PostgresAdapterBase implements DbAdapter {
   protected readonly _helper: DBCollectionHelper
   protected readonly tableFields = new Map<string, string[]>()
-  protected readonly retryTxn = async (fn: (client: PoolClient) => Promise<any>): Promise<any> => {
-    return await retryTxn(this.client, fn)
+  protected readonly queue: ((client: PoolClient) => Promise<any>)[] = []
+  private readonly mutex = new Mutex()
+
+  protected readonly retryTxn = async (fn: (client: PoolClient) => Promise<any>): Promise<void> => {
+    await this.mutex.runExclusive(async () => {
+      await this.processOps(this.txConnection, fn)
+    })
   }

   constructor (
     protected readonly client: Pool,
+    protected readonly connection: PoolClient,
+    protected readonly txConnection: PoolClient,
     protected readonly refClient: PostgresClientReference,
     protected readonly workspaceId: WorkspaceId,
     protected readonly hierarchy: Hierarchy,
@@ -101,6 +109,33 @@ abstract class PostgresAdapterBase implements DbAdapter {
     this._helper = new DBCollectionHelper(this.client, this.workspaceId)
   }

+  private async processOps (client: PoolClient, operation: (client: PoolClient) => Promise<any>): Promise<void> {
+    const backoffInterval = 100 // millis
+    const maxTries = 5
+    let tries = 0
+
+    while (true) {
+      await client.query('BEGIN;')
+      tries++
+      try {
+        const result = await operation(client)
+        await client.query('COMMIT;')
+        return result
+      } catch (err: any) {
+        await client.query('ROLLBACK;')
+
+        if (err.code !== '40001' || tries === maxTries) {
+          throw err
+        } else {
+          console.log('Transaction failed. Retrying.')
+          console.log(err.message)
+          await new Promise((resolve) => setTimeout(resolve, tries * backoffInterval))
+        }
+      }
+    }
+  }
+
   async traverse<T extends Doc>(
     _domain: Domain,
     query: DocumentQuery<T>,
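
Note: the Mutex used by the new retryTxn above is imported from ./utils and is not shown in this diff. A minimal promise-chaining mutex along the following lines would be enough to serialize every transaction onto the dedicated txConnection; this is a sketch under that assumption, not the actual implementation:

// Sketch of a promise-based mutex; the real ./utils implementation may differ.
class Mutex {
  private last: Promise<void> = Promise.resolve()

  async runExclusive<T> (fn: () => Promise<T>): Promise<T> {
    const prev = this.last
    let release: () => void = () => {}
    this.last = new Promise<void>((resolve) => { release = resolve })
    await prev // wait until the previous critical section has finished
    try {
      return await fn()
    } finally {
      release() // let the next waiter proceed
    }
  }
}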
@ -163,6 +198,8 @@ abstract class PostgresAdapterBase implements DbAdapter {
abstract init (): Promise<void> abstract init (): Promise<void>
async close (): Promise<void> { async close (): Promise<void> {
this.txConnection.release()
this.connection.release()
this.refClient.close() this.refClient.close()
} }
@ -178,7 +215,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
sqlChunks.push(`LIMIT ${options.limit}`) sqlChunks.push(`LIMIT ${options.limit}`)
} }
const finalSql: string = [select, ...sqlChunks].join(' ') const finalSql: string = [select, ...sqlChunks].join(' ')
const result = await this.client.query(finalSql) const result = await this.connection.query(finalSql)
return result.rows.map((p) => parseDocWithProjection(p, options?.projection)) return result.rows.map((p) => parseDocWithProjection(p, options?.projection))
} }
@ -190,7 +227,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
continue continue
} }
if (typeof val === 'number') { if (typeof val === 'number') {
res.push(`${this.transformKey(core.class.Doc, key, false)} ${val === 1 ? 'ASC' : 'DESC'}`) res.push(`${this.transformKey(domain, core.class.Doc, key, false)} ${val === 1 ? 'ASC' : 'DESC'}`)
} else { } else {
// todo handle custom sorting // todo handle custom sorting
} }
@ -203,8 +240,8 @@ abstract class PostgresAdapterBase implements DbAdapter {
res.push(`"workspaceId" = '${this.workspaceId.name}'`) res.push(`"workspaceId" = '${this.workspaceId.name}'`)
for (const key in query) { for (const key in query) {
const value = query[key] const value = query[key]
const tkey = this.transformKey(core.class.Doc, key, false) const tkey = this.transformKey(domain, core.class.Doc, key, false)
const translated = this.translateQueryValue(tkey, value, false) const translated = this.translateQueryValue(tkey, value, 'common')
if (translated !== undefined) { if (translated !== undefined) {
res.push(translated) res.push(translated)
} }
@ -231,18 +268,26 @@ abstract class PostgresAdapterBase implements DbAdapter {
if (doc === undefined) continue if (doc === undefined) continue
const prevAttachedTo = (doc as any).attachedTo const prevAttachedTo = (doc as any).attachedTo
TxProcessor.applyUpdate(doc, operations) TxProcessor.applyUpdate(doc, operations)
const converted = convertDoc(doc, this.workspaceId.name) const converted = convertDoc(domain, doc, this.workspaceId.name)
const updates: string[] = [] let paramsIndex = 3
const { space, attachedTo, ...ops } = operations as any
const params: any[] = [doc._id, this.workspaceId.name] const params: any[] = [doc._id, this.workspaceId.name]
if (space !== undefined) { const updates: string[] = []
updates.push(`space = '${space}'`) const { extractedFields, remainingData } = parseUpdate(domain, operations)
const newAttachedTo = (doc as any).attachedTo
if (Object.keys(extractedFields).length > 0) {
for (const key in extractedFields) {
const val = (extractedFields as any)[key]
if (key === 'attachedTo' && val === prevAttachedTo) continue
updates.push(`"${key}" = $${paramsIndex++}`)
params.push(val)
}
} else if (prevAttachedTo !== undefined && prevAttachedTo !== newAttachedTo) {
updates.push(`"attachedTo" = $${paramsIndex++}`)
params.push(newAttachedTo)
} }
if ((doc as any).attachedTo !== prevAttachedTo) {
updates.push(`"attachedTo" = ${attachedTo != null ? "'" + attachedTo + "'" : 'NULL'}`) if (Object.keys(remainingData).length > 0) {
} updates.push(`data = $${paramsIndex++}`)
if (Object.keys(ops).length > 0) {
updates.push('data = $3')
params.push(converted.data) params.push(converted.data)
} }
await client.query( await client.query(
@ -278,7 +323,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
if (options?.total === true) { if (options?.total === true) {
const totalReq = `SELECT COUNT(${domain}._id) as count FROM ${domain}` const totalReq = `SELECT COUNT(${domain}._id) as count FROM ${domain}`
const totalSql = [totalReq, ...sqlChunks].join(' ') const totalSql = [totalReq, ...sqlChunks].join(' ')
const totalResult = await this.client.query(totalSql) const totalResult = await this.connection.query(totalSql)
const parsed = Number.parseInt(totalResult.rows[0]?.count ?? '') const parsed = Number.parseInt(totalResult.rows[0]?.count ?? '')
total = Number.isNaN(parsed) ? 0 : parsed total = Number.isNaN(parsed) ? 0 : parsed
} }
@ -290,7 +335,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
} }
const finalSql: string = [select, ...sqlChunks].join(' ') const finalSql: string = [select, ...sqlChunks].join(' ')
const result = await this.client.query(finalSql) const result = await this.connection.query(finalSql)
if (options?.lookup === undefined) { if (options?.lookup === undefined) {
return toFindResult( return toFindResult(
result.rows.map((p) => parseDocWithProjection(p, options?.projection)), result.rows.map((p) => parseDocWithProjection(p, options?.projection)),
@ -315,9 +360,9 @@ abstract class PostgresAdapterBase implements DbAdapter {
return return
} }
if (query.space === acc._id) return if (query.space === acc._id) return
const key = domain === DOMAIN_SPACE ? '_id' : domain === DOMAIN_TX ? 'data ->> "objectSpace"' : 'space' const key = domain === DOMAIN_SPACE ? '_id' : domain === DOMAIN_TX ? "data ->> 'objectSpace'" : 'space'
const privateCheck = domain === DOMAIN_SPACE ? " OR sec.data ->> 'private' = 'false'" : '' const privateCheck = domain === DOMAIN_SPACE ? ' OR sec.private = false' : ''
const q = `(sec.data -> 'members' @> '"${acc._id}"' OR sec."_class" = '${core.class.SystemSpace}'${privateCheck})` const q = `(sec.members @> '{"${acc._id}"}' OR sec."_class" = '${core.class.SystemSpace}'${privateCheck})`
return `INNER JOIN ${translateDomain(DOMAIN_SPACE)} AS sec ON sec._id = ${domain}.${key} AND sec."workspaceId" = '${this.workspaceId.name}' AND ${q}` return `INNER JOIN ${translateDomain(DOMAIN_SPACE)} AS sec ON sec._id = ${domain}.${key} AND sec."workspaceId" = '${this.workspaceId.name}' AND ${q}`
} }
} }
@ -545,7 +590,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
const _class = Array.isArray(value) ? value[0] : value const _class = Array.isArray(value) ? value[0] : value
const nested = Array.isArray(value) ? value[1] : undefined const nested = Array.isArray(value) ? value[1] : undefined
const domain = translateDomain(this.hierarchy.getDomain(_class)) const domain = translateDomain(this.hierarchy.getDomain(_class))
const tkey = domain === DOMAIN_MODEL ? key : this.transformKey(clazz, key) const tkey = domain === DOMAIN_MODEL ? key : this.transformKey(baseDomain, clazz, key)
const as = `lookup_${domain}_${parentKey !== undefined ? parentKey + '_lookup_' + key : key}` const as = `lookup_${domain}_${parentKey !== undefined ? parentKey + '_lookup_' + key : key}`
res.push({ res.push({
isReverse: false, isReverse: false,
@ -643,9 +688,9 @@ abstract class PostgresAdapterBase implements DbAdapter {
} }
const value = query[key] const value = query[key]
if (value === undefined) continue if (value === undefined) continue
const isDataArray = this.checkDataArray(_class, key) const valueType = this.getValueType(_class, key)
const tkey = this.getKey(_class, baseDomain, key, joins, isDataArray) const tkey = this.getKey(_class, baseDomain, key, joins, valueType === 'dataArray')
const translated = this.translateQueryValue(tkey, value, isDataArray) const translated = this.translateQueryValue(tkey, value, valueType)
if (translated !== undefined) { if (translated !== undefined) {
res.push(translated) res.push(translated)
} }
@ -653,22 +698,23 @@ abstract class PostgresAdapterBase implements DbAdapter {
return res.join(' AND ') return res.join(' AND ')
} }
private checkDataArray<T extends Doc>(_class: Ref<Class<T>>, key: string): boolean { private getValueType<T extends Doc>(_class: Ref<Class<T>>, key: string): ValueType {
const splitted = key.split('.') const splitted = key.split('.')
const mixinOrKey = splitted[0] const mixinOrKey = splitted[0]
const domain = this.hierarchy.getDomain(_class)
if (this.hierarchy.isMixin(mixinOrKey as Ref<Class<Doc>>)) { if (this.hierarchy.isMixin(mixinOrKey as Ref<Class<Doc>>)) {
key = splitted.slice(1).join('.') key = splitted.slice(1).join('.')
const attr = this.hierarchy.findAttribute(mixinOrKey as Ref<Class<Doc>>, key) const attr = this.hierarchy.findAttribute(mixinOrKey as Ref<Class<Doc>>, key)
if (attr !== undefined) { if (attr !== undefined && attr.type._class === core.class.ArrOf) {
return attr.type._class === core.class.ArrOf return isDataField(domain, key) ? 'dataArray' : 'array'
} }
return false return 'common'
} else { } else {
const attr = this.hierarchy.findAttribute(_class, key) const attr = this.hierarchy.findAttribute(_class, key)
if (attr !== undefined) { if (attr !== undefined && attr.type._class === core.class.ArrOf) {
return attr.type._class === core.class.ArrOf return isDataField(domain, key) ? 'dataArray' : 'array'
} }
return false return 'common'
} }
} }
@ -731,12 +777,12 @@ abstract class PostgresAdapterBase implements DbAdapter {
isDataArray: boolean = false isDataArray: boolean = false
): string { ): string {
if (key.startsWith('$lookup')) { if (key.startsWith('$lookup')) {
return this.transformLookupKey(key, joins, isDataArray) return this.transformLookupKey(baseDomain, key, joins, isDataArray)
} }
return `${baseDomain}.${this.transformKey(_class, key, isDataArray)}` return `${baseDomain}.${this.transformKey(baseDomain, _class, key, isDataArray)}`
} }
private transformLookupKey (key: string, joins: JoinProps[], isDataArray: boolean = false): string { private transformLookupKey (domain: string, key: string, joins: JoinProps[], isDataArray: boolean = false): string {
const arr = key.split('.').filter((p) => p !== '$lookup') const arr = key.split('.').filter((p) => p !== '$lookup')
const tKey = arr.pop() ?? '' const tKey = arr.pop() ?? ''
const path = arr.join('.') const path = arr.join('.')
@ -747,12 +793,17 @@ abstract class PostgresAdapterBase implements DbAdapter {
if (join.isReverse) { if (join.isReverse) {
return `${join.toAlias}->'${tKey}'` return `${join.toAlias}->'${tKey}'`
} }
const res = isDataField(tKey) ? (isDataArray ? `data->'${tKey}'` : `data#>>'{${tKey}}'`) : key const res = isDataField(domain, tKey) ? (isDataArray ? `data->'${tKey}'` : `data#>>'{${tKey}}'`) : key
return `${join.toAlias}.${res}` return `${join.toAlias}.${res}`
} }
private transformKey<T extends Doc>(_class: Ref<Class<T>>, key: string, isDataArray: boolean = false): string { private transformKey<T extends Doc>(
if (!isDataField(key)) return `"${key}"` domain: string,
_class: Ref<Class<T>>,
key: string,
isDataArray: boolean = false
): string {
if (!isDataField(domain, key)) return `"${key}"`
const arr = key.split('.').filter((p) => p) const arr = key.split('.').filter((p) => p)
let tKey = '' let tKey = ''
let isNestedField = false let isNestedField = false
@ -799,7 +850,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
return key return key
} }
private translateQueryValue (tkey: string, value: any, isDataArray: boolean): string | undefined { private translateQueryValue (tkey: string, value: any, type: ValueType): string | undefined {
if (value === null) { if (value === null) {
return `${tkey} IS NULL` return `${tkey} IS NULL`
} else if (typeof value === 'object' && !Array.isArray(value)) { } else if (typeof value === 'object' && !Array.isArray(value)) {
@ -825,7 +876,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
break break
case '$in': case '$in':
res.push( res.push(
isDataArray type !== 'common'
? `${tkey} ?| array[${val.length > 0 ? val.map((v: any) => `'${v}'`).join(', ') : 'NULL'}]` ? `${tkey} ?| array[${val.length > 0 ? val.map((v: any) => `'${v}'`).join(', ') : 'NULL'}]`
: `${tkey} IN (${val.length > 0 ? val.map((v: any) => `'${v}'`).join(', ') : 'NULL'})` : `${tkey} IN (${val.length > 0 ? val.map((v: any) => `'${v}'`).join(', ') : 'NULL'})`
) )
@ -856,9 +907,11 @@ abstract class PostgresAdapterBase implements DbAdapter {
} }
return res.length === 0 ? undefined : res.join(' AND ') return res.length === 0 ? undefined : res.join(' AND ')
} }
return isDataArray return type === 'common'
? `${tkey} @> '${typeof value === 'string' ? '"' + value + '"' : value}'` ? `${tkey} = '${value}'`
: `${tkey} = '${value}'` : type === 'array'
? `${tkey} @> '${typeof value === 'string' ? '{"' + value + '"}' : value}'`
: `${tkey} @> '${typeof value === 'string' ? '"' + value + '"' : value}'`
} }
private getProjectionsAliases (join: JoinProps): string[] { private getProjectionsAliases (join: JoinProps): string[] {
@ -876,8 +929,9 @@ abstract class PostgresAdapterBase implements DbAdapter {
`(SELECT jsonb_agg(${join.toAlias}.*) FROM ${join.table} AS ${join.toAlias} WHERE ${join.fromAlias}.${join.fromField} = ${join.toAlias}."${join.toField}" ${classsesQuery}) AS ${join.toAlias}` `(SELECT jsonb_agg(${join.toAlias}.*) FROM ${join.table} AS ${join.toAlias} WHERE ${join.fromAlias}.${join.fromField} = ${join.toAlias}."${join.toField}" ${classsesQuery}) AS ${join.toAlias}`
] ]
} }
const fields = getDocFieldsByDomains(join.table)
const res: string[] = [] const res: string[] = []
for (const key of [...docFields, 'data']) { for (const key of [...fields, 'data']) {
res.push(`${join.toAlias}."${key}" as "lookup_${join.path.replaceAll('.', '_')}_${key}"`) res.push(`${join.toAlias}."${key}" as "lookup_${join.path.replaceAll('.', '_')}_${key}"`)
} }
return res return res
@ -897,7 +951,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
res.push(`${baseDomain}.*`) res.push(`${baseDomain}.*`)
} else { } else {
for (const key in projection) { for (const key in projection) {
if (isDataField(key)) { if (isDataField(baseDomain, key)) {
if (!dataAdded) { if (!dataAdded) {
res.push(`${baseDomain}.data as data`) res.push(`${baseDomain}.data as data`)
dataAdded = true dataAdded = true
@ -1046,7 +1100,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
if (docs.length === 0) { if (docs.length === 0) {
return [] return []
} }
const res = await this.client.query( const res = await this.connection.query(
`SELECT * FROM ${translateDomain(domain)} WHERE _id = ANY($1) AND "workspaceId" = $2`, `SELECT * FROM ${translateDomain(domain)} WHERE _id = ANY($1) AND "workspaceId" = $2`,
[docs, this.workspaceId.name] [docs, this.workspaceId.name]
) )
@ -1056,37 +1110,59 @@ abstract class PostgresAdapterBase implements DbAdapter {
async upload (ctx: MeasureContext, domain: Domain, docs: Doc[]): Promise<void> { async upload (ctx: MeasureContext, domain: Domain, docs: Doc[]): Promise<void> {
const arr = docs.concat() const arr = docs.concat()
return await this.retryTxn(async (client) => { const fields = getDocFieldsByDomains(domain)
while (arr.length > 0) { const filedsWithData = [...fields, 'data']
const part = arr.splice(0, 500) const insertFields: string[] = []
const vals = part const onConflict: string[] = []
.map((doc) => { for (const field of filedsWithData) {
const d = convertDoc(doc, this.workspaceId.name) insertFields.push(`"${field}"`)
return `('${d._id}', '${d.workspaceId}', '${d._class}', '${d.createdBy ?? d.modifiedBy}', '${d.modifiedBy}', ${d.modifiedOn}, ${d.createdOn ?? d.modifiedOn}, '${d.space}', ${ onConflict.push(`"${field}" = EXCLUDED."${field}"`)
d.attachedTo != null ? `'${d.attachedTo}'` : 'NULL' }
}, '${escapeBackticks(JSON.stringify(d.data))}')` const insertStr = insertFields.join(', ')
}) const onConflictStr = onConflict.join(', ')
.join(', ') while (arr.length > 0) {
await client.query( const part = arr.splice(0, 500)
`INSERT INTO ${translateDomain(domain)} (_id, "workspaceId", _class, "createdBy", "modifiedBy", "modifiedOn", "createdOn", space, "attachedTo", data) VALUES ${vals} const values: any[] = []
ON CONFLICT (_id, "workspaceId") DO UPDATE SET _class = EXCLUDED._class, "createdBy" = EXCLUDED."createdBy", "modifiedBy" = EXCLUDED."modifiedBy", "modifiedOn" = EXCLUDED."modifiedOn", "createdOn" = EXCLUDED."createdOn", space = EXCLUDED.space, "attachedTo" = EXCLUDED."attachedTo", data = EXCLUDED.data;` const vars: string[] = []
) let index = 1
for (let i = 0; i < part.length; i++) {
const doc = part[i]
const variables: string[] = []
const d = convertDoc(domain, doc, this.workspaceId.name)
values.push(d.workspaceId)
variables.push(`$${index++}`)
for (const field of fields) {
values.push(d[field])
variables.push(`$${index++}`)
}
values.push(d.data)
variables.push(`$${index++}`)
vars.push(`(${variables.join(', ')})`)
} }
})
const vals = vars.join(',')
await this.retryTxn(async (client) => {
await client.query(
`INSERT INTO ${translateDomain(domain)} ("workspaceId", ${insertStr}) VALUES ${vals}
ON CONFLICT ("workspaceId", _id) DO UPDATE SET ${onConflictStr};`,
values
)
})
}
} }
async clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> { async clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> {
await this.client.query(`DELETE FROM ${translateDomain(domain)} WHERE _id = ANY($1) AND "workspaceId" = $2`, [ await this.connection.query(`DELETE FROM ${translateDomain(domain)} WHERE _id = ANY($1) AND "workspaceId" = $2`, [
docs, docs,
this.workspaceId.name this.workspaceId.name
]) ])
} }
async groupBy<T>(ctx: MeasureContext, domain: Domain, field: string): Promise<Set<T>> { async groupBy<T>(ctx: MeasureContext, domain: Domain, field: string): Promise<Set<T>> {
const key = isDataField(field) ? `data ->> '${field}'` : `"${field}"` const key = isDataField(domain, field) ? `data ->> '${field}'` : `"${field}"`
const result = await ctx.with('groupBy', { domain }, async (ctx) => { const result = await ctx.with('groupBy', { domain }, async (ctx) => {
try { try {
const result = await this.client.query( const result = await this.connection.query(
`SELECT DISTINCT ${key} as ${field} FROM ${translateDomain(domain)} WHERE "workspaceId" = $1`, `SELECT DISTINCT ${key} as ${field} FROM ${translateDomain(domain)} WHERE "workspaceId" = $1`,
[this.workspaceId.name] [this.workspaceId.name]
) )
@ -1117,19 +1193,18 @@ abstract class PostgresAdapterBase implements DbAdapter {
;(op as any)['%hash%'] = null ;(op as any)['%hash%'] = null
} }
TxProcessor.applyUpdate(doc, op) TxProcessor.applyUpdate(doc, op)
const converted = convertDoc(doc, this.workspaceId.name) const converted = convertDoc(domain, doc, this.workspaceId.name)
const updates: string[] = [] const updates: string[] = []
const { space, attachedTo, ...data } = op as any let paramsIndex = 3
const { extractedFields, remainingData } = parseUpdate(domain, op)
const params: any[] = [doc._id, this.workspaceId.name] const params: any[] = [doc._id, this.workspaceId.name]
if (space !== undefined) { for (const key in extractedFields) {
updates.push(`space = '${space}'`) updates.push(`"${key}" = $${paramsIndex++}`)
params.push((extractedFields as any)[key])
} }
if (attachedTo !== undefined) { if (Object.keys(remainingData).length > 0) {
updates.push(`"attachedTo" = ${attachedTo != null ? "'" + attachedTo + "'" : 'NULL'}`) updates.push(`data = $${paramsIndex++}`)
}
if (Object.keys(data).length > 0) {
updates.push('data = $3')
params.push(converted.data) params.push(converted.data)
} }
await client.query( await client.query(
@ -1145,22 +1220,41 @@ abstract class PostgresAdapterBase implements DbAdapter {
} }
async insert (domain: string, docs: Doc[]): Promise<TxResult> { async insert (domain: string, docs: Doc[]): Promise<TxResult> {
return await this.retryTxn(async (client) => { const fields = getDocFieldsByDomains(domain)
while (docs.length > 0) { const filedsWithData = [...fields, 'data']
const part = docs.splice(0, 500) const insertFields: string[] = []
const vals = part for (const field of filedsWithData) {
.map((doc) => { insertFields.push(`"${field}"`)
const d = convertDoc(doc, this.workspaceId.name) }
return `('${d._id}', '${d.workspaceId}', '${d._class}', '${d.createdBy ?? d.modifiedBy}', '${d.modifiedBy}', ${d.modifiedOn}, ${d.createdOn ?? d.modifiedOn}, '${d.space}', ${ const insertStr = insertFields.join(', ')
d.attachedTo != null ? `'${d.attachedTo}'` : 'NULL' while (docs.length > 0) {
}, '${escapeBackticks(JSON.stringify(d.data))}')` const part = docs.splice(0, 500)
}) const values: any[] = []
.join(', ') const vars: string[] = []
await client.query( let index = 1
`INSERT INTO ${translateDomain(domain)} (_id, "workspaceId", _class, "createdBy", "modifiedBy", "modifiedOn", "createdOn", space, "attachedTo", data) VALUES ${vals}` for (let i = 0; i < part.length; i++) {
) const doc = part[i]
const variables: string[] = []
const d = convertDoc(domain, doc, this.workspaceId.name)
values.push(d.workspaceId)
variables.push(`$${index++}`)
for (const field of fields) {
values.push(d[field])
variables.push(`$${index++}`)
}
values.push(d.data)
variables.push(`$${index++}`)
vars.push(`(${variables.join(', ')})`)
} }
}) const vals = vars.join(',')
await this.retryTxn(async (client) => {
await client.query(
`INSERT INTO ${translateDomain(domain)} ("workspaceId", ${insertStr}) VALUES ${vals}`,
values
)
})
}
return {}
} }
} }
@ -1218,18 +1312,30 @@ class PostgresAdapter extends PostgresAdapterBase {
} }
private async txMixin (ctx: MeasureContext, tx: TxMixin<Doc, Doc>): Promise<TxResult> { private async txMixin (ctx: MeasureContext, tx: TxMixin<Doc, Doc>): Promise<TxResult> {
return await ctx.with('tx-mixin', { _class: tx.objectClass, mixin: tx.mixin }, async () => { await ctx.with('tx-mixin', { _class: tx.objectClass, mixin: tx.mixin }, async () => {
return await this.retryTxn(async (client) => { await this.retryTxn(async (client) => {
const doc = await this.findDoc(ctx, client, tx.objectClass, tx.objectId, true) const doc = await this.findDoc(ctx, client, tx.objectClass, tx.objectId, true)
if (doc === undefined) return {} if (doc === undefined) return
TxProcessor.updateMixin4Doc(doc, tx) TxProcessor.updateMixin4Doc(doc, tx)
const converted = convertDoc(doc, this.workspaceId.name) const domain = this.hierarchy.getDomain(tx.objectClass)
const converted = convertDoc(domain, doc, this.workspaceId.name)
const updates: string[] = ['"modifiedBy" = $1', '"modifiedOn" = $2']
let paramsIndex = 5
const { extractedFields } = parseUpdate(domain, tx.attributes as Partial<Doc>)
const params: any[] = [tx.modifiedBy, tx.modifiedOn, tx.objectId, this.workspaceId.name]
for (const key in extractedFields) {
updates.push(`"${key}" = $${paramsIndex++}`)
params.push(converted[key])
}
updates.push(`data = $${paramsIndex++}`)
params.push(converted.data)
await client.query( await client.query(
`UPDATE ${translateDomain(this.hierarchy.getDomain(tx.objectClass))} SET "modifiedBy" = $1, "modifiedOn" = $2, data = $5 WHERE _id = $3 AND "workspaceId" = $4`, `UPDATE ${translateDomain(domain)} SET ${updates.join(', ')} WHERE _id = $3 AND "workspaceId" = $4`,
[tx.modifiedBy, tx.modifiedOn, tx.objectId, this.workspaceId.name, converted.data] params
) )
}) })
}) })
return {}
} }
async tx (ctx: MeasureContext, ...txes: Tx[]): Promise<TxResult[]> { async tx (ctx: MeasureContext, ...txes: Tx[]): Promise<TxResult[]> {
@ -1277,22 +1383,22 @@ class PostgresAdapter extends PostgresAdapterBase {
doc = await this.findDoc(ctx, client, tx.objectClass, tx.objectId, true) doc = await this.findDoc(ctx, client, tx.objectClass, tx.objectId, true)
if (doc === undefined) return {} if (doc === undefined) return {}
TxProcessor.applyUpdate(doc, ops) TxProcessor.applyUpdate(doc, ops)
const converted = convertDoc(doc, this.workspaceId.name) const domain = this.hierarchy.getDomain(tx.objectClass)
const converted = convertDoc(domain, doc, this.workspaceId.name)
const updates: string[] = ['"modifiedBy" = $1', '"modifiedOn" = $2'] const updates: string[] = ['"modifiedBy" = $1', '"modifiedOn" = $2']
const { space, attachedTo, ...data } = ops as any let paramsIndex = 5
const { extractedFields, remainingData } = parseUpdate(domain, ops)
const params: any[] = [tx.modifiedBy, tx.modifiedOn, tx.objectId, this.workspaceId.name] const params: any[] = [tx.modifiedBy, tx.modifiedOn, tx.objectId, this.workspaceId.name]
if (space !== undefined) { for (const key in extractedFields) {
updates.push(`space = '${space}'`) updates.push(`"${key}" = $${paramsIndex++}`)
params.push(converted[key])
} }
if (attachedTo !== undefined) { if (Object.keys(remainingData).length > 0) {
updates.push(`"attachedTo" = ${attachedTo != null ? "'" + attachedTo + "'" : 'NULL'}`) updates.push(`data = $${paramsIndex++}`)
}
if (Object.keys(data).length > 0) {
updates.push('data = $5')
params.push(converted.data) params.push(converted.data)
} }
await client.query( await client.query(
`UPDATE ${translateDomain(this.hierarchy.getDomain(tx.objectClass))} SET ${updates.join(', ')} WHERE _id = $3 AND "workspaceId" = $4`, `UPDATE ${translateDomain(domain)} SET ${updates.join(', ')} WHERE _id = $3 AND "workspaceId" = $4`,
params params
) )
}) })
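// Worked example (illustrative; assumes a 'task' domain whose schema declares "attachedTo"):
// an update of { attachedTo: parent, title: 'X' } is split by parseUpdate and produces roughly
//   UPDATE task SET "modifiedBy" = $1, "modifiedOn" = $2, "attachedTo" = $5, data = $6
//   WHERE _id = $3 AND "workspaceId" = $4
// with params = [modifiedBy, modifiedOn, objectId, workspaceId, converted.attachedTo, converted.data].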
@ -1315,21 +1421,24 @@ class PostgresAdapter extends PostgresAdapterBase {
): Promise<TxResult> { ): Promise<TxResult> {
return await ctx.with('update jsonb_set', {}, async () => { return await ctx.with('update jsonb_set', {}, async () => {
const updates: string[] = ['"modifiedBy" = $1', '"modifiedOn" = $2'] const updates: string[] = ['"modifiedBy" = $1', '"modifiedOn" = $2']
const params: any[] = [tx.modifiedBy, tx.modifiedOn, tx.objectId, this.workspaceId.name]
let paramsIndex = 5
const domain = this.hierarchy.getDomain(tx.objectClass)
const { extractedFields, remainingData } = parseUpdate(domain, tx.operations)
const { space, attachedTo, ...ops } = tx.operations as any const { space, attachedTo, ...ops } = tx.operations as any
if (ops['%hash%'] === undefined) { if (ops['%hash%'] === undefined) {
ops['%hash%'] = null ops['%hash%'] = null
} }
if (space !== undefined) { for (const key in extractedFields) {
updates.push(`space = '${space}'`) updates.push(`"${key}" = $${paramsIndex++}`)
} params.push((extractedFields as any)[key])
if (attachedTo !== undefined) {
updates.push(`"attachedTo" = ${attachedTo != null ? "'" + attachedTo + "'" : 'NULL'}`)
} }
let from = 'data' let from = 'data'
let dataUpdated = false let dataUpdated = false
for (const key in ops) { for (const key in remainingData) {
if (ops[key] === undefined) continue if (ops[key] === undefined) continue
from = `jsonb_set(${from}, '{${key}}', '${getUpdateValue(ops[key])}', true)` from = `jsonb_set(${from}, '{${key}}', $${paramsIndex++}::jsonb, true)`
params.push(JSON.stringify((remainingData as any)[key]))
dataUpdated = true dataUpdated = true
} }
if (dataUpdated) { if (dataUpdated) {
@ -1340,13 +1449,13 @@ class PostgresAdapter extends PostgresAdapterBase {
await this.retryTxn(async (client) => { await this.retryTxn(async (client) => {
await client.query( await client.query(
`UPDATE ${translateDomain(this.hierarchy.getDomain(tx.objectClass))} SET ${updates.join(', ')} WHERE _id = $3 AND "workspaceId" = $4`, `UPDATE ${translateDomain(this.hierarchy.getDomain(tx.objectClass))} SET ${updates.join(', ')} WHERE _id = $3 AND "workspaceId" = $4`,
[tx.modifiedBy, tx.modifiedOn, tx.objectId, this.workspaceId.name] params
) )
if (retrieve) {
const object = await this.findDoc(ctx, client, tx.objectClass, tx.objectId)
return { object }
}
}) })
if (retrieve) {
const object = await this.findDoc(ctx, this.connection, tx.objectClass, tx.objectId)
return { object }
}
} catch (err) { } catch (err) {
console.error(err) console.error(err)
} }
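// Worked example (illustrative; field names assumed): for remainingData = { title: 'X', rank: 'a0' }
// the loop above nests jsonb_set calls, so the data column update becomes roughly
//   data = jsonb_set(jsonb_set(data, '{title}', $5::jsonb, true), '{rank}', $6::jsonb, true)
// where $5 and $6 are the JSON.stringify-ed values pushed into params.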
@ -1373,15 +1482,16 @@ class PostgresAdapter extends PostgresAdapterBase {
} }
protected async txRemoveDoc (ctx: MeasureContext, tx: TxRemoveDoc<Doc>): Promise<TxResult> { protected async txRemoveDoc (ctx: MeasureContext, tx: TxRemoveDoc<Doc>): Promise<TxResult> {
return await ctx.with('tx-remove-doc', { _class: tx.objectClass }, async () => { await ctx.with('tx-remove-doc', { _class: tx.objectClass }, async () => {
const domain = translateDomain(this.hierarchy.getDomain(tx.objectClass)) const domain = translateDomain(this.hierarchy.getDomain(tx.objectClass))
return await this.retryTxn(async (client) => { await this.retryTxn(async (client) => {
await client.query(`DELETE FROM ${domain} WHERE _id = $1 AND "workspaceId" = $2`, [ await client.query(`DELETE FROM ${domain} WHERE _id = $1 AND "workspaceId" = $2`, [
tx.objectId, tx.objectId,
this.workspaceId.name this.workspaceId.name
]) ])
}) })
}) })
return {}
} }
} }
@ -1405,8 +1515,8 @@ class PostgresTxAdapter extends PostgresAdapterBase implements TxAdapter {
} }
async getModel (ctx: MeasureContext): Promise<Tx[]> { async getModel (ctx: MeasureContext): Promise<Tx[]> {
const res = await this.client.query( const res = await this.connection.query(
`SELECT * FROM ${translateDomain(DOMAIN_TX)} WHERE "workspaceId" = '${this.workspaceId.name}' AND data->>'objectSpace' = '${core.space.Model}' ORDER BY _id ASC, "modifiedOn" ASC` `SELECT * FROM ${translateDomain(DOMAIN_TX)} WHERE "workspaceId" = '${this.workspaceId.name}' AND data->>'objectSpace' = '${core.space.Model}' ORDER BY _id ASC, "modifiedOn" ASC`
) )
const model = res.rows.map((p) => parseDoc<Tx>(p)) const model = res.rows.map((p) => parseDoc<Tx>(p))
// We need to put all core.account.System transactions first // We need to put all core.account.System transactions first
@ -1428,7 +1538,10 @@ export async function createPostgresAdapter (
modelDb: ModelDb modelDb: ModelDb
): Promise<DbAdapter> { ): Promise<DbAdapter> {
const client = getDBClient(url) const client = getDBClient(url)
const adapter = new PostgresAdapter(await client.getClient(), client, workspaceId, hierarchy, modelDb) const pool = await client.getClient()
const mainConnection = await pool.connect()
const txConnection = await pool.connect()
const adapter = new PostgresAdapter(pool, mainConnection, txConnection, client, workspaceId, hierarchy, modelDb)
return adapter return adapter
} }
@ -1443,7 +1556,10 @@ export async function createPostgresTxAdapter (
modelDb: ModelDb modelDb: ModelDb
): Promise<TxAdapter> { ): Promise<TxAdapter> {
const client = getDBClient(url) const client = getDBClient(url)
const adapter = new PostgresTxAdapter(await client.getClient(), client, workspaceId, hierarchy, modelDb) const pool = await client.getClient()
const mainConnection = await pool.connect()
const txConnection = await pool.connect()
const adapter = new PostgresTxAdapter(pool, mainConnection, txConnection, client, workspaceId, hierarchy, modelDb)
await adapter.init() await adapter.init()
return adapter return adapter
} }

View File

@ -0,0 +1 @@
export type ValueType = 'common' | 'array' | 'dataArray'

View File

@ -18,9 +18,11 @@ import core, {
AccountRole, AccountRole,
type Class, type Class,
type Doc, type Doc,
type DocumentUpdate,
type Domain, type Domain,
type FieldIndexConfig, type FieldIndexConfig,
generateId, generateId,
type MixinUpdate,
type Projection, type Projection,
type Ref, type Ref,
type WorkspaceId type WorkspaceId
@ -28,6 +30,7 @@ import core, {
import { PlatformError, unknownStatus } from '@hcengineering/platform' import { PlatformError, unknownStatus } from '@hcengineering/platform'
import { type DomainHelperOperations } from '@hcengineering/server-core' import { type DomainHelperOperations } from '@hcengineering/server-core'
import { Pool, type PoolClient } from 'pg' import { Pool, type PoolClient } from 'pg'
import { defaultSchema, domainSchemas, getSchema } from './schemas'
const connections = new Map<string, PostgresClientReferenceImpl>() const connections = new Map<string, PostgresClientReferenceImpl>()
@ -87,24 +90,26 @@ export async function createTable (client: Pool, domains: string[]): Promise<voi
const toCreate = mapped.filter((it) => !exists.rows.map((it) => it.table_name).includes(it)) const toCreate = mapped.filter((it) => !exists.rows.map((it) => it.table_name).includes(it))
await retryTxn(client, async (client) => { await retryTxn(client, async (client) => {
for (const domain of toCreate) { for (const domain of toCreate) {
const schema = getSchema(domain)
const fields: string[] = []
for (const key in schema) {
const val = schema[key]
fields.push(`"${key}" ${val[0]} ${val[1] ? 'NOT NULL' : ''}`)
}
const columns = fields.join(', ')
await client.query( await client.query(
`CREATE TABLE ${domain} ( `CREATE TABLE ${domain} (
"workspaceId" VARCHAR(255) NOT NULL, "workspaceId" text NOT NULL,
_id VARCHAR(255) NOT NULL, ${columns},
_class VARCHAR(255) NOT NULL,
"createdBy" VARCHAR(255),
"modifiedBy" VARCHAR(255) NOT NULL,
"modifiedOn" bigint NOT NULL,
"createdOn" bigint,
space VARCHAR(255) NOT NULL,
"attachedTo" VARCHAR(255),
data JSONB NOT NULL, data JSONB NOT NULL,
PRIMARY KEY("workspaceId", _id) PRIMARY KEY("workspaceId", _id)
)` )`
) )
await client.query(` if (schema.attachedTo !== undefined) {
CREATE INDEX ${domain}_attachedTo ON ${domain} ("attachedTo") await client.query(`
`) CREATE INDEX ${domain}_attachedTo ON ${domain} ("attachedTo")
`)
}
await client.query(` await client.query(`
CREATE INDEX ${domain}_class ON ${domain} (_class) CREATE INDEX ${domain}_class ON ${domain} (_class)
`) `)
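// Sketch of the schema shape consumed above (inferred from this diff: each entry is a
// [sqlType, notNull] tuple; the concrete contents of domainSchemas/defaultSchema are not shown here,
// so the fields and types below are assumptions).
const exampleSchema: Record<string, [string, boolean]> = {
  _id: ['text', true],
  _class: ['text', true],
  space: ['text', true],
  modifiedBy: ['text', true],
  modifiedOn: ['bigint', true],
  createdBy: ['text', false],
  createdOn: ['bigint', false],
  attachedTo: ['text', false]
}
// With such a schema, createTable would emit roughly
//   CREATE TABLE task ("workspaceId" text NOT NULL, "_id" text NOT NULL, ..., data JSONB NOT NULL,
//   PRIMARY KEY("workspaceId", _id))
// plus the "attachedTo" index, since that field is present in the schema.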
@ -221,19 +226,67 @@ export function getDBClient (connectionString: string, database?: string): Postg
return new ClientRef(existing) return new ClientRef(existing)
} }
export function convertDoc<T extends Doc> (doc: T, workspaceId: string): DBDoc { export function convertDoc<T extends Doc> (domain: string, doc: T, workspaceId: string): DBDoc {
const { _id, _class, createdBy, modifiedBy, modifiedOn, createdOn, space, attachedTo, ...data } = doc as any const extractedFields: Doc & Record<string, any> = {
return { _id: doc._id,
_id, space: doc.space,
_class, createdBy: doc.createdBy,
createdBy, modifiedBy: doc.modifiedBy,
modifiedBy, modifiedOn: doc.modifiedOn,
modifiedOn, createdOn: doc.createdOn,
createdOn, _class: doc._class
space, }
attachedTo, const remainingData: Partial<T> = {}
for (const key in doc) {
if (Object.keys(extractedFields).includes(key)) continue
if (getDocFieldsByDomains(domain).includes(key)) {
extractedFields[key] = doc[key]
} else {
remainingData[key] = doc[key]
}
}
const res: any = {
...extractedFields,
workspaceId, workspaceId,
data data: remainingData
}
return res
}
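// Usage sketch (illustrative; the 'task' domain and document fields are assumptions):
// schema-declared fields become top-level columns, the rest is folded into the jsonb "data" column.
//   convertDoc('task', { _id, _class, space, modifiedBy, modifiedOn, attachedTo, title, rank }, 'ws1')
//   // => { _id, _class, space, modifiedBy, modifiedOn, attachedTo, workspaceId: 'ws1',
//   //      data: { title, rank } }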
export function parseUpdate<T extends Doc> (
domain: string,
ops: DocumentUpdate<T> | MixinUpdate<Doc, T>
): {
extractedFields: Partial<T>
remainingData: Partial<T>
} {
const extractedFields: Partial<T> = {}
const remainingData: Partial<T> = {}
for (const key in ops) {
if (key === '$push' || key === '$pull') {
const val = (ops as any)[key]
for (const k in val) {
if (getDocFieldsByDomains(domain).includes(k)) {
;(extractedFields as any)[k] = val[k]
} else {
;(remainingData as any)[k] = val[k]
}
}
} else {
if (getDocFieldsByDomains(domain).includes(key)) {
;(extractedFields as any)[key] = (ops as any)[key]
} else {
;(remainingData as any)[key] = (ops as any)[key]
}
}
}
return {
extractedFields,
remainingData
} }
} }
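// Usage sketch (illustrative; assumes "attachedTo" is a schema column of the 'task' domain):
//   parseUpdate('task', { attachedTo: parentId, title: 'New title' })
//   // => { extractedFields: { attachedTo: parentId }, remainingData: { title: 'New title' } }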
@ -343,39 +396,17 @@ export function parseDoc<T extends Doc> (doc: DBDoc): T {
export interface DBDoc extends Doc { export interface DBDoc extends Doc {
workspaceId: string workspaceId: string
attachedTo?: Ref<Doc>
data: Record<string, any> data: Record<string, any>
[key: string]: any
} }
export function isDataField (field: string): boolean { export function isDataField (domain: string, field: string): boolean {
return !docFields.includes(field) return !getDocFieldsByDomains(domain).includes(field)
} }
export const docFields: string[] = [ export function getDocFieldsByDomains (domain: string): string[] {
'_id', const schema = domainSchemas[domain] ?? defaultSchema
'_class', return Object.keys(schema)
'createdBy',
'modifiedBy',
'modifiedOn',
'createdOn',
'space',
'attachedTo'
] as const
export function getUpdateValue (value: any): string {
if (typeof value === 'string') {
return '"' + escapeDoubleQuotes(value) + '"'
}
if (typeof value === 'object') {
return JSON.stringify(value)
}
return value
}
function escapeDoubleQuotes (jsonString: string): string {
const unescapedQuotes = /(?<!\\)"/g
return jsonString.replace(unescapedQuotes, '\\"')
} }
export interface JoinProps { export interface JoinProps {
@ -389,3 +420,38 @@ export interface JoinProps {
toClass: Ref<Class<Doc>> toClass: Ref<Class<Doc>>
classes?: Ref<Class<Doc>>[] // filter by classes classes?: Ref<Class<Doc>>[] // filter by classes
} }
export class Mutex {
private locked: boolean = false
private readonly waitingQueue: Array<(value: boolean) => void> = []
private async acquire (): Promise<void> {
while (this.locked) {
await new Promise<boolean>((resolve) => {
this.waitingQueue.push(resolve)
})
}
this.locked = true
}
private release (): void {
if (!this.locked) {
throw new Error('Mutex is not locked')
}
this.locked = false
const nextResolver = this.waitingQueue.shift()
if (nextResolver !== undefined) {
nextResolver(true)
}
}
async runExclusive<T>(fn: () => Promise<T> | T): Promise<T> {
await this.acquire()
try {
return await fn()
} finally {
this.release()
}
}
}
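// Minimal usage sketch for the mutex above (names are illustrative): serialize async operations
// that share a single resource, e.g. writes going through one pooled connection, so two
// transactions never interleave on it.
const initMutex = new Mutex()
async function withInit<T> (op: () => Promise<T>): Promise<T> {
  return await initMutex.runExclusive(async () => await op())
}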

View File

@ -86,7 +86,8 @@ export async function createWorkspace (
version: Data<Version>, version: Data<Version>,
progress: number, progress: number,
message?: string message?: string
) => Promise<void> ) => Promise<void>,
external: boolean = false
): Promise<void> { ): Promise<void> {
const childLogger = ctx.newChild('createWorkspace', {}, { workspace: workspaceInfo.workspace }) const childLogger = ctx.newChild('createWorkspace', {}, { workspace: workspaceInfo.workspace })
const ctxModellogger: ModelLogger = { const ctxModellogger: ModelLogger = {
@ -162,7 +163,8 @@ export async function createWorkspace (
await handleWsEvent?.('progress', version, 80 + Math.round((Math.min(value, 100) / 100) * 20)) await handleWsEvent?.('progress', version, 80 + Math.round((Math.min(value, 100) / 100) * 20))
}, },
false, false,
'disable' 'disable',
external
) )
await handleWsEvent?.('create-done', version, 100, '') await handleWsEvent?.('create-done', version, 100, '')

View File

@ -43,6 +43,7 @@ services:
links: links:
- mongodb - mongodb
- minio - minio
- postgres
ports: ports:
- 3003:3003 - 3003:3003
volumes: volumes:
@ -59,6 +60,7 @@ services:
image: hardcoreeng/workspace image: hardcoreeng/workspace
links: links:
- mongodb - mongodb
- postgres
- minio - minio
volumes: volumes:
- ./branding-test.json:/var/cfg/branding.json - ./branding-test.json:/var/cfg/branding.json
@ -106,6 +108,7 @@ services:
- elastic - elastic
- minio - minio
- rekoni - rekoni
- postgres
- account - account
ports: ports:
- 3334:3334 - 3334:3334

Binary files not shown (13 binary files changed)

View File

@ -24,8 +24,8 @@ import { copyVideo, deleteVideo } from './video'
const expires = 86400 const expires = 86400
const cacheControl = `public,max-age=${expires}` const cacheControl = `public,max-age=${expires}`
// 64MB hash limit // 1MB hash limit
const HASH_LIMIT = 64 * 1024 * 1024 const HASH_LIMIT = 1 * 1024 * 1024
interface BlobMetadata { interface BlobMetadata {
lastModified: number lastModified: number
@ -75,7 +75,9 @@ export async function handleBlobGet (
const status = length !== undefined && length < object.size ? 206 : 200 const status = length !== undefined && length < object.size ? 206 : 200
const response = new Response(object?.body, { headers, status }) const response = new Response(object?.body, { headers, status })
ctx.waitUntil(cache.put(request, response.clone())) if (response.status === 200) {
ctx.waitUntil(cache.put(request, response.clone()))
}
return response return response
} }
@ -119,8 +121,22 @@ export async function deleteBlob (env: Env, workspace: string, name: string): Pr
} }
export async function postBlobFormData (request: Request, env: Env, workspace: string): Promise<Response> { export async function postBlobFormData (request: Request, env: Env, workspace: string): Promise<Response> {
const contentType = request.headers.get('Content-Type')
if (contentType === null || !contentType.includes('multipart/form-data')) {
console.error({ error: 'expected multipart/form-data' })
return error(400, 'expected multipart/form-data')
}
const sql = postgres(env.HYPERDRIVE.connectionString) const sql = postgres(env.HYPERDRIVE.connectionString)
const formData = await request.formData()
let formData: FormData
try {
formData = await request.formData()
} catch (err: any) {
const message = err instanceof Error ? err.message : String(err)
console.error({ error: 'failed to parse form data', message })
return error(400, 'failed to parse form data')
}
const files: [File, key: string][] = [] const files: [File, key: string][] = []
formData.forEach((value: any, key: string) => { formData.forEach((value: any, key: string) => {
@ -166,14 +182,11 @@ async function saveBlob (
const httpMetadata = { contentType: type, cacheControl } const httpMetadata = { contentType: type, cacheControl }
const filename = getUniqueFilename() const filename = getUniqueFilename()
const sha256hash = await getSha256(file) if (file.size <= HASH_LIMIT) {
const hash = await getSha256(file)
if (sha256hash !== null) {
// Lucky boy, nothing to upload, use existing blob
const hash = sha256hash
const data = await db.getData(sql, { hash, location }) const data = await db.getData(sql, { hash, location })
if (data !== null) { if (data !== null) {
// Lucky boy, nothing to upload, use existing blob
await db.createBlob(sql, { workspace, name, hash, location }) await db.createBlob(sql, { workspace, name, hash, location })
} else { } else {
await bucket.put(filename, file, { httpMetadata }) await bucket.put(filename, file, { httpMetadata })
@ -187,11 +200,7 @@ async function saveBlob (
} else { } else {
// For large files we cannot calculate checksum beforehand // For large files we cannot calculate checksum beforehand
// upload file with unique filename and then obtain checksum // upload file with unique filename and then obtain checksum
const object = await bucket.put(filename, file, { httpMetadata }) const { hash } = await uploadLargeFile(bucket, file, filename, { httpMetadata })
const hash =
object.checksums.md5 !== undefined ? getMd5Checksum(object.checksums.md5) : (crypto.randomUUID() as UUID)
const data = await db.getData(sql, { hash, location }) const data = await db.getData(sql, { hash, location })
if (data !== null) { if (data !== null) {
// We found an existing blob with the same hash // We found an existing blob with the same hash
@ -218,7 +227,7 @@ export async function handleBlobUploaded (env: Env, workspace: string, name: str
throw Error('blob not found') throw Error('blob not found')
} }
const hash = object.checksums.md5 !== undefined ? getMd5Checksum(object.checksums.md5) : (crypto.randomUUID() as UUID) const hash = object.checksums.md5 !== undefined ? digestToUUID(object.checksums.md5) : (crypto.randomUUID() as UUID)
const data = await db.getData(sql, { hash, location }) const data = await db.getData(sql, { hash, location })
if (data !== null) { if (data !== null) {
@ -232,23 +241,40 @@ export async function handleBlobUploaded (env: Env, workspace: string, name: str
} }
} }
async function uploadLargeFile (
bucket: R2Bucket,
file: File,
filename: string,
options: R2PutOptions
): Promise<{ hash: UUID }> {
const digestStream = new crypto.DigestStream('SHA-256')
const fileStream = file.stream()
const [digestFS, uploadFS] = fileStream.tee()
const digestPromise = digestFS.pipeTo(digestStream)
const uploadPromise = bucket.put(filename, uploadFS, options)
await Promise.all([digestPromise, uploadPromise])
const hash = digestToUUID(await digestStream.digest)
return { hash }
}
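// Illustrative call site (mirrors how saveBlob above uses it for files larger than HASH_LIMIT):
//   const { hash } = await uploadLargeFile(bucket, file, getUniqueFilename(), { httpMetadata })
// The stream is tee'd so the SHA-256 digest is computed while the same bytes are uploaded to R2,
// avoiding a second pass over a large file.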
function getUniqueFilename (): UUID { function getUniqueFilename (): UUID {
return crypto.randomUUID() as UUID return crypto.randomUUID() as UUID
} }
async function getSha256 (file: File): Promise<UUID | null> { async function getSha256 (file: File): Promise<UUID> {
if (file.size > HASH_LIMIT) {
return null
}
const digestStream = new crypto.DigestStream('SHA-256') const digestStream = new crypto.DigestStream('SHA-256')
await file.stream().pipeTo(digestStream) await file.stream().pipeTo(digestStream)
const digest = await digestStream.digest const digest = await digestStream.digest
return toUUID(new Uint8Array(digest)) return digestToUUID(digest)
} }
function getMd5Checksum (digest: ArrayBuffer): UUID { function digestToUUID (digest: ArrayBuffer): UUID {
return toUUID(new Uint8Array(digest)) return toUUID(new Uint8Array(digest))
} }