Merge branch 'develop' into staging

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
Andrey Sobolev 2025-01-20 21:56:06 +07:00
commit 1b5b52bf58
No known key found for this signature in database
GPG Key ID: BD80F68D68D8F7F2
27 changed files with 340 additions and 172 deletions

View File

@ -75,6 +75,7 @@ import path from 'path'
import { buildStorageFromConfig, createStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
import { program, type Command } from 'commander'
import { addControlledDocumentRank } from './qms'
import { clearTelegramHistory } from './telegram'
import { diffWorkspace, recreateElastic, updateField } from './workspace'
@ -2101,6 +2102,55 @@ export function devTool (
})
})
program
.command('add-controlled-doc-rank-mongo')
.description('add rank to controlled documents')
.option('-w, --workspace <workspace>', 'Selected workspace only', '')
.action(async (cmd: { workspace: string }) => {
const { version } = prepareTools()
let workspaces: Workspace[] = []
await withAccountDatabase(async (db) => {
workspaces = await listWorkspacesPure(db)
workspaces = workspaces
.filter((p) => isActiveMode(p.mode))
.filter((p) => cmd.workspace === '' || p.workspace === cmd.workspace)
.sort((a, b) => b.lastVisit - a.lastVisit)
})
console.log('found workspaces', workspaces.length)
const mongodbUri = getMongoDBUrl()
const client = getMongoClient(mongodbUri)
const _client = await client.getClient()
try {
const count = workspaces.length
let index = 0
for (const workspace of workspaces) {
index++
toolCtx.info('processing workspace', {
workspace: workspace.workspace,
version: workspace.version,
index,
count
})
if (workspace.version === undefined || !deepEqual(workspace.version, version)) {
console.log(`upgrade to ${versionToString(version)} is required`)
continue
}
const workspaceId = getWorkspaceId(workspace.workspace)
const wsDb = getWorkspaceMongoDB(_client, { name: workspace.workspace })
await addControlledDocumentRank(toolCtx, wsDb, workspaceId)
}
} finally {
client.close()
}
})
extendProgram?.(program)
program.parse(process.argv)

55
dev/tool/src/qms.ts Normal file
View File

@ -0,0 +1,55 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import { type MeasureContext, type Ref, type WorkspaceId } from '@hcengineering/core'
import documents, { type DocumentMeta, type ProjectMeta } from '@hcengineering/controlled-documents'
import { DOMAIN_DOCUMENTS } from '@hcengineering/model-controlled-documents'
import { type Db } from 'mongodb'
import { makeRank } from '@hcengineering/task'
/**
 * Backfill the `rank` field on controlled-document `ProjectMeta` records.
 *
 * Orders project meta entries by the title of the referenced `DocumentMeta`
 * (numeric-aware compare, so "Doc 2" sorts before "Doc 10") and assigns
 * monotonically increasing ranks. Safe to re-run: ranks are recomputed from
 * scratch each time.
 *
 * @param ctx - measurement/logging context
 * @param db - workspace MongoDB database
 * @param workspaceId - workspace identifier (used for logging only)
 */
export async function addControlledDocumentRank (ctx: MeasureContext, db: Db, workspaceId: WorkspaceId): Promise<void> {
  const collections = await db.listCollections().toArray()
  if (collections.find((it) => it.name === DOMAIN_DOCUMENTS) === undefined) {
    ctx.error('skipping migration, no collection found', { workspace: workspaceId.name })
    return
  }

  const prjMeta = await db
    .collection<ProjectMeta>(DOMAIN_DOCUMENTS)
    .find({ _class: documents.class.ProjectMeta })
    .toArray()

  const docMeta = await db
    .collection<DocumentMeta>(DOMAIN_DOCUMENTS)
    .find({ _class: documents.class.DocumentMeta })
    .toArray()

  // Index DocumentMeta by id so the comparator below is O(1) per lookup.
  const docMetaById = new Map<Ref<DocumentMeta>, DocumentMeta>()
  for (const doc of docMeta) {
    docMetaById.set(doc._id, doc)
  }

  // Missing/unresolved meta sorts first via the '' fallback.
  prjMeta.sort((a, b) => {
    const titleA = docMetaById.get(a.meta)?.title ?? ''
    const titleB = docMetaById.get(b.meta)?.title ?? ''
    return titleA.localeCompare(titleB, undefined, { numeric: true })
  })

  // Assign ranks in sorted order and persist them in a single ordered
  // bulkWrite instead of one round trip per document.
  let rank = makeRank(undefined, undefined)
  const ops = prjMeta.map((doc) => {
    const op = { updateOne: { filter: { _id: doc._id }, update: { $set: { rank } } } }
    rank = makeRank(rank, undefined)
    return op
  })
  if (ops.length > 0) {
    await db.collection(DOMAIN_DOCUMENTS).bulkWrite(ops)
  }
}

View File

@ -13,7 +13,8 @@
// limitations under the License.
-->
<script lang="ts">
import { closePopup, closeTooltip, navigate, parseLocation } from '@hcengineering/ui'
import { getMetadata } from '@hcengineering/platform'
import uiPlugin, { closePopup, closeTooltip, navigate, parseLocation } from '@hcengineering/ui'
export let href: string | undefined
export let disabled = false
@ -43,11 +44,16 @@
closeTooltip()
try {
const url = new URL(href)
if (url.origin === window.location.origin) {
e.preventDefault()
e.stopPropagation()
navigate(parseLocation(url))
const loc = parseLocation(url)
const routes = getMetadata(uiPlugin.metadata.Routes)
const app = routes?.get(loc.path[0])
if (app !== undefined) {
e.preventDefault()
e.stopPropagation()
navigate(loc)
}
}
} catch {}
}

View File

@ -15,7 +15,7 @@
<script lang="ts">
import { getMetadata } from '@hcengineering/platform'
import { MarkupMark, MarkupMarkType } from '@hcengineering/text'
import { navigate, parseLocation } from '@hcengineering/ui'
import uiPlugin, { navigate, parseLocation } from '@hcengineering/ui'
import presentation from '../../plugin'
@ -29,8 +29,15 @@
const frontUrl = getMetadata(presentation.metadata.FrontUrl) ?? window.location.origin
if (url.origin === frontUrl) {
e.preventDefault()
navigate(parseLocation(url))
const loc = parseLocation(url)
const routes = getMetadata(uiPlugin.metadata.Routes)
const app = routes?.get(loc.path[0])
if (app !== undefined) {
e.preventDefault()
e.stopPropagation()
navigate(loc)
}
}
}
} catch (err) {

View File

@ -57,6 +57,7 @@
"Next": "Další",
"Skip": "Přeskočit",
"SignUpCompleted": "Registrace dokončena",
"StartUsingHuly": "Začněte používat Huly"
"StartUsingHuly": "Začněte používat Huly",
"WorkspaceArchivedDesc": "Pracovní prostor je archivován kvůli nečinnosti. Kontaktujte nás prosím pro obnovení..."
}
}

View File

@ -57,6 +57,7 @@
"Next": "Weiter",
"Skip": "Überspringen",
"SignUpCompleted": "Registrierung abgeschlossen",
"StartUsingHuly": "Starten Sie mit Huly"
"StartUsingHuly": "Starten Sie mit Huly",
"WorkspaceArchivedDesc": "Workspace wurde wegen Inaktivität archiviert. Bitte kontaktieren Sie uns zur Wiederherstellung..."
}
}

View File

@ -57,6 +57,7 @@
"Next": "Next",
"Skip": "Skip",
"SignUpCompleted": "Sign up completed",
"StartUsingHuly": "Start using Huly"
"StartUsingHuly": "Start using Huly",
"WorkspaceArchivedDesc": "Workspace is archived because it is unused. Please contact us to restore it..."
}
}

View File

@ -57,6 +57,7 @@
"Next": "Siguiente",
"Skip": "Saltar",
"SignUpCompleted": "Registro completado",
"StartUsingHuly": "Comienza a usar Huly"
"StartUsingHuly": "Comienza a usar Huly",
"WorkspaceArchivedDesc": "El espacio de trabajo está archivado por no estar en uso, por favor contáctenos para restaurarlo..."
}
}

View File

@ -57,6 +57,7 @@
"Next": "Suivant",
"Skip": "Passer",
"SignUpCompleted": "Inscription terminée",
"StartUsingHuly": "Commencez à utiliser Huly"
"StartUsingHuly": "Commencez à utiliser Huly",
"WorkspaceArchivedDesc": "L'espace de travail est archivé en raison de son inactivité, veuillez nous contacter pour le restaurer..."
}
}

View File

@ -57,6 +57,7 @@
"Next": "Avanti",
"Skip": "Salta",
"SignUpCompleted": "Registrazione completata",
"StartUsingHuly": "Inizia a usare Huly"
"StartUsingHuly": "Inizia a usare Huly",
"WorkspaceArchivedDesc": "Il workspace è stato archiviato perché inutilizzato. Si prega di contattarci per ripristinarlo..."
}
}

View File

@ -57,6 +57,7 @@
"Next": "Seguinte",
"Skip": "Saltar",
"SignUpCompleted": "Registo concluído",
"StartUsingHuly": "Começar a usar Huly"
"StartUsingHuly": "Começar a usar Huly",
"WorkspaceArchivedDesc": "O espaço de trabalho está arquivado por estar inativo, por favor, entre em contato conosco para restaurá-lo..."
}
}

View File

@ -57,6 +57,7 @@
"Next": "Дальше",
"Skip": "Пропустить",
"SignUpCompleted": "Регистрация завершена",
"StartUsingHuly": "Начать использовать Huly"
"StartUsingHuly": "Начать использовать Huly",
"WorkspaceArchivedDesc": "Рабочее пространство архивировано из-за неиспользования, пожалуйста, свяжитесь с нами для восстановления..."
}
}

View File

@ -57,6 +57,7 @@
"Next": "下一个",
"Skip": "跳过",
"SignUpCompleted": "注册完成",
"StartUsingHuly": "开始使用 Huly"
"StartUsingHuly": "开始使用 Huly",
"WorkspaceArchivedDesc": "工作区因未使用而归档,请与我们联系以恢复..."
}
}

View File

@ -60,7 +60,8 @@
(it) =>
(it.workspaceName?.includes(search) ?? false) ||
(it.workspaceUrl?.includes(search) ?? false) ||
it.workspace?.includes(search)
it.workspace?.includes(search) ||
it.createdBy?.includes(search)
),
(it) => {
const lastUsageDays = Math.round((now - it.lastVisit) / (1000 * 3600 * 24))
@ -169,6 +170,9 @@
/>
</div>
</span>
<div class="ml-1" style:width={'12rem'}>
{workspace.createdBy}
</div>
<span class="label overflow-label" style:width={'8rem'}>
{workspace.region ?? ''}
</span>

View File

@ -14,24 +14,26 @@
// limitations under the License.
-->
<script lang="ts">
import { isArchivingMode } from '@hcengineering/core'
import { LoginInfo, Workspace } from '@hcengineering/login'
import { OK, Severity, Status } from '@hcengineering/platform'
import presentation, { NavLink, isAdminUser, reduceCalls } from '@hcengineering/presentation'
import MessageBox from '@hcengineering/presentation/src/components/MessageBox.svelte'
import {
Button,
Label,
Spinner,
Scroller,
SearchEdit,
Spinner,
deviceOptionsStore as deviceInfo,
setMetadataLocalStorage,
showPopup,
ticker
} from '@hcengineering/ui'
import { onMount } from 'svelte'
import login from '../plugin'
import { getAccount, getHref, getWorkspaces, goTo, navigateToWorkspace, selectWorkspace } from '../utils'
import StatusControl from './StatusControl.svelte'
import { isArchivingMode } from '@hcengineering/core'
export let navigateUrl: string | undefined = undefined
let workspaces: Workspace[] = []
@ -67,6 +69,17 @@
status = new Status(Severity.INFO, login.status.ConnectingToServer, {})
const [loginStatus, result] = await selectWorkspace(workspace)
if (isArchivingMode(result?.mode)) {
showPopup(MessageBox, {
label: login.string.SelectWorkspace,
message: login.string.WorkspaceArchivedDesc,
canSubmit: false,
params: {},
action: async () => {}
})
status = loginStatus
return
}
status = loginStatus
navigateToWorkspace(workspace, result, navigateUrl)
@ -139,17 +152,19 @@
{#if isArchivingMode(workspace.mode)}
- <Label label={presentation.string.Archived} />
{/if}
{#if workspace.mode !== 'active'}
{#if workspace.mode !== 'active' && workspace.mode !== 'archived'}
({workspace.progress}%)
{/if}
</span>
{#if isAdmin}
<span class="text-xs flex-row-center flex-center">
<span class="text-xs flex-row-center flex-center">
{#if isAdmin}
{workspace.workspace}
{#if workspace.region !== undefined}
at ({workspace.region})
{/if}
<div class="text-sm">
{/if}
<div class="text-sm">
{#if isAdmin}
{#if workspace.backupInfo != null}
{@const sz = workspace.backupInfo.dataSize + workspace.backupInfo.blobsSize}
{@const szGb = Math.round((sz * 100) / 1024) / 100}
@ -159,10 +174,10 @@
- {Math.round(sz)}Mb -
{/if}
{/if}
({lastUsageDays} days)
</div>
</span>
{/if}
{/if}
({lastUsageDays} days)
</div>
</span>
</div>
</div>
{/each}

View File

@ -85,7 +85,9 @@ export default plugin(loginId, {
LinkValidHours: '' as IntlString,
EmailMask: '' as IntlString,
NoLimit: '' as IntlString,
InviteLimit: '' as IntlString
InviteLimit: '' as IntlString,
WorkspaceArchived: '' as IntlString,
WorkspaceArchivedDesc: '' as IntlString
},
function: {
SendInvite: '' as Resource<(email: string, personId?: Ref<Doc>, role?: AccountRole) => Promise<void>>,

View File

@ -100,6 +100,10 @@ export async function resolveLocation (loc: Location): Promise<ResolvedLocation
return undefined
}
if (loc.path[3] === undefined) {
return undefined
}
if (loc.path[3] === 'surveys') {
return undefined
}

View File

@ -228,18 +228,22 @@
if (listListCategory?.[0] == null) {
return
}
const obj = listListCategory[0].getLimited()[0]
listListCategory[0].expand()
select(0, obj)
const obj = listListCategory[0]?.getLimited()?.[0]
if (obj !== undefined) {
listListCategory[0]?.expand()
select(0, obj)
}
return
} else {
if (listListCategory?.[0] == null) {
return
}
const g = listListCategory[categories.length - 1].getLimited()
listListCategory[categories.length - 1].expand()
const obj = g[g.length - 1]
select(0, obj)
const g = listListCategory[categories.length - 1]?.getLimited() ?? []
if (g.length > 0) {
listListCategory[categories.length - 1].expand()
const obj = g[g.length - 1]
select(0, obj)
}
return
}
} else {
@ -297,17 +301,21 @@
if (dir === undefined || dir === 'vertical') {
if (statePos - 1 < 0 && objState >= 0) {
if (objState !== 0) {
const pstateObjs = listListCategory[objState - 1].getLimited()
dispatch('select', pstateObjs[pstateObjs.length - 1])
const pstateObjs = listListCategory[objState - 1]?.getLimited()
if (pstateObjs !== undefined) {
dispatch('select', pstateObjs[pstateObjs.length - 1])
}
} else {
dispatch('select-prev', stateObjs[statePos])
}
} else {
const obj = stateObjs[statePos - 1]
if (obj !== undefined) {
const focusDoc = listListCategory[objState]?.getLimited().find((it) => it._id === obj._id) ?? obj
if (!noScroll) scrollInto(objState, focusDoc)
dispatch('row-focus', focusDoc)
const focusDoc = listListCategory[objState]?.getLimited()?.find((it) => it._id === obj._id) ?? obj
if (focusDoc !== undefined) {
if (!noScroll) scrollInto(objState, focusDoc)
dispatch('row-focus', focusDoc)
}
}
}
return
@ -315,7 +323,7 @@
}
if (offset === 1) {
if (dir === undefined || dir === 'vertical') {
const limited = listListCategory[objState].getLimited()
const limited = listListCategory[objState]?.getLimited() ?? []
if (statePos + 1 >= limited.length && objState < categories.length) {
if (objState + 1 !== categories.length) {
const pstateObjs = getGroupByValues(groupByDocs, categories[objState + 1])
@ -326,18 +334,22 @@
} else {
const obj = stateObjs[statePos + 1]
if (obj !== undefined) {
const focusDoc = listListCategory[objState]?.getLimited().find((it) => it._id === obj._id) ?? obj
if (!noScroll) scrollInto(objState, focusDoc)
dispatch('row-focus', focusDoc)
const focusDoc = listListCategory[objState]?.getLimited()?.find((it) => it._id === obj._id) ?? obj
if (focusDoc !== undefined) {
if (!noScroll) scrollInto(objState, focusDoc)
dispatch('row-focus', focusDoc)
}
}
}
return
}
}
if (offset === 0) {
const focusDoc = listListCategory[objState]?.getLimited().find((it) => it._id === obj._id) ?? obj
if (!noScroll) scrollInto(objState, focusDoc)
dispatch('row-focus', focusDoc)
const focusDoc = listListCategory[objState]?.getLimited()?.find((it) => it._id === obj._id) ?? obj
if (focusDoc !== undefined) {
if (!noScroll) scrollInto(objState, focusDoc)
dispatch('row-focus', focusDoc)
}
}
} else {
listCategory[objState]?.select(offset, of, dir, noScroll)

View File

@ -41,7 +41,7 @@ import {
createPostgreeDestroyAdapter,
createPostgresAdapter,
createPostgresTxAdapter,
setDbUnsafePrepareOptions
setDBExtraOptions
} from '@hcengineering/postgres'
import { readFileSync } from 'node:fs'
const model = JSON.parse(readFileSync(process.env.MODEL_JSON ?? 'model.json').toString()) as Tx[]
@ -83,11 +83,8 @@ export function start (
const usePrepare = process.env.DB_PREPARE === 'true'
setDbUnsafePrepareOptions({
find: usePrepare,
model: false,
update: usePrepare,
upload: usePrepare
setDBExtraOptions({
prepare: usePrepare // We override defaults
})
registerServerPlugins()

View File

@ -504,7 +504,7 @@ export async function selectWorkspace (
const result: WorkspaceLoginInfo = {
endpoint: '',
email,
token: '',
token: generateToken(email, getWorkspaceId(workspaceInfo.workspace), getExtra(accountInfo)),
workspace: workspaceUrl,
workspaceId: workspaceInfo.workspace,
mode: workspaceInfo.mode,

View File

@ -109,7 +109,7 @@ export class DbAdapterManagerImpl implements DBAdapterManager {
}
}
async initAdapters (): Promise<void> {
async initAdapters (ctx: MeasureContext): Promise<void> {
for (const [key, adapter] of this.adapters) {
// already initialized
if (key !== this.conf.domains[DOMAIN_TX] && adapter.init !== undefined) {
@ -130,7 +130,7 @@ export class DbAdapterManagerImpl implements DBAdapterManager {
}
}
}
await adapter?.init?.(this.metrics, domains, excludeDomains)
await ctx.with(`init adapter ${key}`, {}, (ctx) => adapter?.init?.(ctx, domains, excludeDomains))
}
}
}

View File

@ -19,7 +19,7 @@ import { getDBClient, retryTxn } from './utils'
export { getDocFieldsByDomains, translateDomain } from './schemas'
export * from './storage'
export { convertDoc, createTables, getDBClient, retryTxn, setDBExtraOptions, setDbUnsafePrepareOptions } from './utils'
export { convertDoc, createTables, getDBClient, retryTxn, setDBExtraOptions, setExtraOptions } from './utils'
export function createPostgreeDestroyAdapter (url: string): WorkspaceDestroyAdapter {
return {

View File

@ -80,8 +80,8 @@ import {
createTables,
DBCollectionHelper,
type DBDoc,
dbUnsafePrepareOptions,
getDBClient,
getPrepare,
inferType,
isDataField,
isOwner,
@ -325,14 +325,15 @@ class ValuesVariables {
add (value: any, type: string = ''): string {
// Compact value if string and same
if (typeof value === 'string') {
const v = this.valueHashes.get(value + ':' + type)
if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {
const vkey = `${value}:${type}`
const v = this.valueHashes.get(vkey)
if (v !== undefined) {
return v
}
this.values.push(value)
const idx = type !== '' ? `$${this.index++}${type}` : `$${this.index++}`
this.valueHashes.set(value + ':' + type, idx)
this.valueHashes.set(vkey, idx)
return idx
} else {
this.values.push(value)
@ -359,6 +360,33 @@ class ValuesVariables {
})
return vv
}
// Debug/logging helper: substitute the $N placeholders in `sql` with
// SQL-escaped literal renderings of the collected values, so a failing query
// can be logged in a replayable form (see the findAll error path). The real
// execution still passes values separately to the driver.
injectVars (sql: string): string {
  const escQuote = (d: any | any[]): string => {
    if (d == null) {
      return 'NULL'
    }
    if (Array.isArray(d)) {
      return 'ARRAY[' + d.map(escQuote).join(',') + ']'
    }
    switch (typeof d) {
      case 'number':
        if (isNaN(d) || !isFinite(d)) {
          throw new Error('Invalid number value')
        }
        return d.toString()
      case 'boolean':
        return d ? 'TRUE' : 'FALSE'
      case 'string':
        // Double embedded single quotes per SQL string-literal rules.
        return `'${d.replace(/'/g, "''")}'`
      default:
        throw new Error(`Unsupported value type: ${typeof d}`)
    }
  }
  // Hoist the values snapshot: previously getValues() was re-invoked for
  // every placeholder match inside the replaceAll callback.
  const values = this.getValues()
  return sql.replaceAll(/(\$\d+)/g, (_, v) => {
    // Out-of-range placeholders fall back to their own text (e.g. "$7"),
    // which escQuote renders as a quoted string — harmless for logging.
    return escQuote(values[parseInt(v.substring(1)) - 1] ?? v)
  })
}
}
abstract class PostgresAdapterBase implements DbAdapter {
@ -457,9 +485,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
}
const finalSql: string = [select, ...sqlChunks].join(' ')
const result: DBDoc[] = await this.mgr.retry(undefined, (client) =>
client.unsafe(finalSql, vars.getValues(), {
prepare: dbUnsafePrepareOptions.find
})
client.unsafe(finalSql, vars.getValues(), getPrepare())
)
return result.map((p) => parseDocWithProjection(p, domain, options?.projection))
}
@ -519,9 +545,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
const res = await client.unsafe(
`SELECT * FROM ${translateDomain(domain)} WHERE ${translatedQuery} FOR UPDATE`,
vars.getValues(),
{
prepare: dbUnsafePrepareOptions.find
}
getPrepare()
)
const docs = res.map((p) => parseDoc(p as any, schemaFields.schema))
for (const doc of docs) {
@ -553,9 +577,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
WHERE "workspaceId" = ${params.add(this.workspaceId.name, '::uuid')}
AND _id = ${params.add(doc._id, '::text')}`,
params.getValues(),
{
prepare: dbUnsafePrepareOptions.update
}
getPrepare()
)
}
})
@ -593,9 +615,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
await client.unsafe(
`UPDATE ${translateDomain(domain)} SET ${updates.join(', ')} WHERE ${translatedQuery};`,
vars.getValues(),
{
prepare: dbUnsafePrepareOptions.find
}
getPrepare()
)
})
}
@ -604,9 +624,11 @@ abstract class PostgresAdapterBase implements DbAdapter {
const vars = new ValuesVariables()
const translatedQuery = this.buildRawQuery(vars, domain, query)
await this.mgr.retry(undefined, async (client) => {
await client.unsafe(`DELETE FROM ${translateDomain(domain)} WHERE ${translatedQuery}`, vars.getValues(), {
prepare: dbUnsafePrepareOptions.update
})
await client.unsafe(
`DELETE FROM ${translateDomain(domain)} WHERE ${translatedQuery}`,
vars.getValues(),
getPrepare()
)
})
}
@ -670,18 +692,15 @@ abstract class PostgresAdapterBase implements DbAdapter {
if (options?.total === true) {
const totalReq = `SELECT COUNT(${domain}._id) as count FROM ${domain}`
const totalSql = [totalReq, ...totalSqlChunks].join(' ')
const totalResult = await connection.unsafe(totalSql, vars.getValues(), {
prepare: dbUnsafePrepareOptions.find
})
const totalResult = await connection.unsafe(totalSql, vars.getValues(), getPrepare())
const parsed = Number.parseInt(totalResult[0].count)
total = Number.isNaN(parsed) ? 0 : parsed
}
const finalSql: string = [select, ...sqlChunks].join(' ')
fquery = finalSql
const result = await connection.unsafe(finalSql, vars.getValues(), {
prepare: dbUnsafePrepareOptions.find
})
const result = await connection.unsafe(finalSql, vars.getValues(), getPrepare())
if (
options?.lookup === undefined &&
options?.domainLookup === undefined &&
@ -697,7 +716,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
}
})) as FindResult<T>
} catch (err) {
ctx.error('Error in findAll', { err })
ctx.error('Error in findAll', { err, sql: vars.injectVars(fquery) })
throw err
}
},
@ -1170,8 +1189,13 @@ abstract class PostgresAdapterBase implements DbAdapter {
if (join.isReverse) {
return `${join.toAlias}->'${tKey}'`
}
const res = isDataField(domain, tKey) ? (isDataArray ? `data->'${tKey}'` : `data#>>'{${tKey}}'`) : key
return `${join.toAlias}.${res}`
if (isDataField(domain, tKey)) {
if (isDataArray) {
return `${join.toAlias}."data"->'${tKey}'`
}
return `${join.toAlias}."data"#>>'{${tKey}}'`
}
return `${join.toAlias}."${tKey}"`
}
private transformKey<T extends Doc>(
@ -1505,9 +1529,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
`SELECT * FROM ${translateDomain(domain)}
WHERE "workspaceId" = $1::uuid AND _id = ANY($2::text[])`,
[this.workspaceId.name, docs],
{
prepare: dbUnsafePrepareOptions.find
}
getPrepare()
)
return res.map((p) => parseDocWithProjection(p as any, domain))
})
@ -1562,9 +1584,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
`INSERT INTO ${tdomain} ("workspaceId", ${insertStr}) VALUES ${vals}
ON CONFLICT ("workspaceId", _id) DO UPDATE SET ${onConflictStr};`,
values.getValues(),
{
prepare: dbUnsafePrepareOptions.upload
}
getPrepare()
)
)
} else {
@ -1574,9 +1594,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
await client.unsafe(
`INSERT INTO ${tdomain} ("workspaceId", ${insertStr}) VALUES ${vals};`,
values.getValues(),
{
prepare: dbUnsafePrepareOptions.upload
}
getPrepare()
)
)
}
@ -1598,9 +1616,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
client.unsafe(
`DELETE FROM ${tdomain} WHERE "workspaceId" = $1 AND _id = ANY($2::text[])`,
[this.workspaceId.name, part],
{
prepare: dbUnsafePrepareOptions.upload
}
getPrepare()
)
)
})
@ -1619,9 +1635,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
const vars = new ValuesVariables()
const finalSql = `SELECT DISTINCT ${key} as ${field}, Count(*) AS count FROM ${translateDomain(domain)} WHERE ${this.buildRawQuery(vars, domain, query ?? {})} GROUP BY ${key}`
return await this.mgr.retry(ctx.id, async (connection) => {
const result = await connection.unsafe(finalSql, vars.getValues(), {
prepare: dbUnsafePrepareOptions.find
})
const result = await connection.unsafe(finalSql, vars.getValues(), getPrepare())
return new Map(result.map((r) => [r[field.toLocaleLowerCase()], parseInt(r.count)]))
})
} catch (err) {
@ -1722,9 +1736,7 @@ class PostgresAdapter extends PostgresAdapterBase {
SET ${updates.join(', ')}
WHERE "workspaceId" = ${wsId} AND _id = ${oId}`,
params.getValues(),
{
prepare: dbUnsafePrepareOptions.update
}
getPrepare()
)
})
})
@ -1837,9 +1849,7 @@ class PostgresAdapter extends PostgresAdapterBase {
WHERE "workspaceId" = ${wsId}
AND _id = ${oId}`,
params.getValues(),
{
prepare: dbUnsafePrepareOptions.update
}
getPrepare()
)
})
if (tx.retrieve === true && doc !== undefined) {
@ -1928,11 +1938,7 @@ class PostgresAdapter extends PostgresAdapterBase {
WHERE "workspaceId" = $1::uuid AND "_id" = update_data.__id`
await this.mgr.retry(ctx.id, (client) =>
ctx.with('bulk-update', {}, () =>
client.unsafe(op, data, {
prepare: dbUnsafePrepareOptions.update
})
)
ctx.with('bulk-update', {}, () => client.unsafe(op, data, getPrepare()))
)
}
}
@ -1966,9 +1972,7 @@ class PostgresAdapter extends PostgresAdapterBase {
forUpdate ? ' FOR UPDATE' : ''
}`,
[this.workspaceId.name, _id],
{
prepare: dbUnsafePrepareOptions.find
}
getPrepare()
)
const dbDoc = res[0] as any
return dbDoc !== undefined ? parseDoc(dbDoc, getSchema(domain)) : undefined
@ -2015,9 +2019,7 @@ class PostgresTxAdapter extends PostgresAdapterBase implements TxAdapter {
async getModel (ctx: MeasureContext): Promise<Tx[]> {
const res: DBDoc[] = await this.mgr.retry(undefined, (client) => {
return client.unsafe(
`SELECT * FROM "${translateDomain(DOMAIN_MODEL_TX)}" WHERE "workspaceId" = $1::uuid ORDER BY _id::text ASC, "modifiedOn"::bigint ASC`,
[this.workspaceId.name],
{ prepare: dbUnsafePrepareOptions.model }
`SELECT * FROM "${translateDomain(DOMAIN_MODEL_TX)}" WHERE "workspaceId" = '${this.workspaceId.name}'::uuid ORDER BY _id::text ASC, "modifiedOn"::bigint ASC`
)
})

View File

@ -55,6 +55,8 @@ process.on('exit', () => {
const clientRefs = new Map<string, ClientRef>()
const loadedDomains = new Set<string>()
let loadedTables = new Set<string>()
export async function retryTxn (
pool: postgres.Sql,
operation: (client: postgres.TransactionSql) => Promise<any>
@ -83,26 +85,30 @@ export async function createTables (
return
}
const mapped = filtered.map((p) => translateDomain(p))
const tables = await ctx.with('load-table', {}, () =>
client.unsafe(
`
const t = Date.now()
loadedTables =
loadedTables.size === 0
? new Set(
(
await ctx.with('load-table', {}, () =>
client.unsafe(`
SELECT table_name
FROM information_schema.tables
WHERE table_name = ANY( $1::text[] )
`,
[mapped]
)
)
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
AND table_name NOT LIKE 'pg_%'`)
)
).map((it) => it.table_name)
)
: loadedTables
console.log('load-table', Date.now() - t)
const exists = new Set(tables.map((it) => it.table_name))
const domainsToLoad = mapped.filter((it) => exists.has(it))
const domainsToLoad = mapped.filter((it) => loadedTables.has(it))
if (domainsToLoad.length > 0) {
await ctx.with('load-schemas', {}, () => getTableSchema(client, domainsToLoad))
}
const domainsToCreate: string[] = []
for (const domain of mapped) {
if (!exists.has(domain)) {
if (!loadedTables.has(domain)) {
domainsToCreate.push(domain)
} else {
loadedDomains.add(url + domain)
@ -120,13 +126,10 @@ export async function createTables (
}
async function getTableSchema (client: postgres.Sql, domains: string[]): Promise<void> {
const res = await client.unsafe(
`SELECT column_name::name, data_type::text, is_nullable::text, table_name::name
const res = await client.unsafe(`SELECT column_name::name, data_type::text, is_nullable::text, table_name::name
FROM information_schema.columns
WHERE table_name = ANY($1::text[]) and table_schema = 'public'::name
ORDER BY table_name::name, ordinal_position::int ASC;`,
[domains]
)
WHERE table_name IN (${domains.map((it) => `'${it}'`).join(', ')}) and table_schema = 'public'::name
ORDER BY table_name::name, ordinal_position::int ASC;`)
const schemas: Record<string, Schema> = {}
for (const column of res) {
@ -277,27 +280,25 @@ export class ClientRef implements PostgresClientReference {
}
}
let dbExtraOptions: Partial<Options<any>> = {}
export let dbExtraOptions: Partial<Options<any>> = {}
// Replace the extra postgres.js connection options (e.g. `prepare`, ssl,
// connection params) merged in when clients are created. Call before any
// client is constructed; already-open connections are not affected.
export function setDBExtraOptions (options: Partial<Options<any>>): void {
  dbExtraOptions = options
}
export interface DbUnsafePrepareOptions {
upload: boolean
find: boolean
update: boolean
model: boolean
// Per-query option object for postgres.js `unsafe` calls: whether to use
// prepared statements. Defaults to false unless enabled via setDBExtraOptions.
export function getPrepare (): { prepare: boolean } {
  const { prepare } = dbExtraOptions
  return { prepare: prepare ?? false }
}
export let dbUnsafePrepareOptions: DbUnsafePrepareOptions = {
upload: true,
find: true,
update: true,
model: true
// Platform behavior flags that are not postgres.js connection options.
export interface DBExtraOptions {
  // NOTE(review): presumably "running under Cloudflare Workers" — the cloud
  // transactor sets useCF: true. Confirm intended semantics with callers.
  useCF: boolean
}
export function setDbUnsafePrepareOptions (options: DbUnsafePrepareOptions): void {
dbUnsafePrepareOptions = options
export let dbExtra: DBExtraOptions = {
useCF: false
}
// Replace the platform behavior flags (see DBExtraOptions). Call once at
// startup before any adapter work reads `dbExtra`.
export function setExtraOptions (options: DBExtraOptions): void {
  dbExtra = options
}
/**

View File

@ -5,7 +5,7 @@
"template": "cloud",
"scripts": {
"deploy": "wrangler deploy",
"dev": "wrangler dev --port 3335",
"dev": "wrangler dev --port 3335 --remote",
"dev-local": "wrangler dev --port 3335 --local --upstream-protocol=http",
"start": "wrangler dev --port 3335",
"logs": "npx wrangler tail --format pretty",

View File

@ -38,8 +38,9 @@ import {
createPostgreeDestroyAdapter,
createPostgresAdapter,
createPostgresTxAdapter,
getDBClient,
setDBExtraOptions,
setDbUnsafePrepareOptions
setExtraOptions
} from '@hcengineering/postgres'
import {
createServerPipeline,
@ -75,13 +76,11 @@ export class Transactor extends DurableObject<Env> {
ssl: false,
connection: {
application_name: 'cloud-transactor'
}
},
prepare: false
})
setDbUnsafePrepareOptions({
upload: false,
find: false,
update: false,
model: false
setExtraOptions({
useCF: true
})
registerTxAdapterFactory('postgresql', createPostgresTxAdapter, true)
registerAdapterFactory('postgresql', createPostgresAdapter, true)
@ -105,23 +104,28 @@ export class Transactor extends DurableObject<Env> {
console.log({ message: 'use stats', url: this.env.STATS_URL })
console.log({ message: 'use fulltext', url: this.env.FULLTEXT_URL })
const dbUrl = env.DB_MODE === 'direct' ? env.DB_URL ?? '' : env.HYPERDRIVE.connectionString
// TODO:
const storage = createDummyStorageAdapter()
this.pipelineFactory = async (ctx, ws, upgrade, broadcast, branding) => {
const pipeline = createServerPipeline(
this.measureCtx,
env.DB_MODE === 'direct' ? env.DB_URL ?? '' : env.HYPERDRIVE.connectionString,
model,
{
externalStorage: storage,
adapterSecurity: false,
disableTriggers: false,
fulltextUrl: env.FULLTEXT_URL,
extraLogging: true
}
)
return await pipeline(ctx, ws, upgrade, broadcast, branding)
const pipeline = createServerPipeline(this.measureCtx, dbUrl, model, {
externalStorage: storage,
adapterSecurity: false,
disableTriggers: false,
fulltextUrl: env.FULLTEXT_URL,
extraLogging: true
})
const result = await pipeline(ctx, ws, upgrade, broadcast, branding)
const client = getDBClient(dbUrl)
const connection = await client.getClient()
const t1 = Date.now()
await connection`select now()`
console.log('DB query time', Date.now() - t1)
client.close()
return result
}
void this.ctx

View File

@ -13,8 +13,8 @@ head_sampling_rate = 1 # optional. default = 1.
# If you are running back-end logic in a Worker, running it closer to your back-end infrastructure
# rather than the end user may result in better performance.
# Docs: https://developers.cloudflare.com/workers/configuration/smart-placement/#smart-placement
# [placement]
# mode = "smart"
[placement]
mode = "smart"
# Variable bindings. These are arbitrary, plaintext strings (similar to environment variables)
# Docs: