Merge branch 'develop' into staging-new

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Andrey Sobolev 2025-02-07 15:42:45 +07:00
commit c66593b82b
GPG Key ID: BD80F68D68D8F7F2
37 changed files with 335 additions and 324 deletions

View File

@ -31,9 +31,7 @@ export const attachmentOperation: MigrateOperation = {
DOMAIN_ATTACHMENT,
{ _class: attachment.class.Attachment, attachedToClass: 'chunter:class:Comment' },
{
$set: {
attachedToClass: 'chunter:class:ChatMessage'
}
attachedToClass: 'chunter:class:ChatMessage'
}
)
}

View File

@ -168,7 +168,7 @@ export const contactOperation: MigrateOperation = {
objectClass: 'contact:class:Employee'
},
{
$set: { objectClass: contact.mixin.Employee }
objectClass: contact.mixin.Employee
}
)
@ -178,7 +178,7 @@ export const contactOperation: MigrateOperation = {
'tx.attributes.srcDocClass': 'contact:class:Employee'
},
{
$set: { 'tx.attributes.srcDocClass': contact.mixin.Employee }
'tx.attributes.srcDocClass': contact.mixin.Employee
}
)
@ -188,7 +188,7 @@ export const contactOperation: MigrateOperation = {
'tx.attributes.srcDocClass': 'contact:class:Employee'
},
{
$set: { 'tx.attributes.srcDocClass': contact.mixin.Employee }
'tx.attributes.srcDocClass': contact.mixin.Employee
}
)
@ -199,7 +199,7 @@ export const contactOperation: MigrateOperation = {
'attributes.type.to': 'contact:class:Employee'
},
{
$set: { 'attributes.type.to': contact.mixin.Employee }
'attributes.type.to': contact.mixin.Employee
}
)
await client.update(
@ -209,7 +209,7 @@ export const contactOperation: MigrateOperation = {
'operations.type.to': 'contact:class:Employee'
},
{
$set: { 'operations.type.to': contact.mixin.Employee }
'operations.type.to': contact.mixin.Employee
}
)
@ -219,7 +219,7 @@ export const contactOperation: MigrateOperation = {
'attributes.extends': 'contact:class:Employee'
},
{
$set: { 'attributes.extends': contact.mixin.Employee }
'attributes.extends': contact.mixin.Employee
}
)
@ -227,7 +227,7 @@ export const contactOperation: MigrateOperation = {
await client.update(
d,
{ attachedToClass: 'contact:class:Employee' },
{ $set: { attachedToClass: contact.mixin.Employee } }
{ attachedToClass: contact.mixin.Employee }
)
}
await client.update(
@ -236,17 +236,17 @@ export const contactOperation: MigrateOperation = {
_class: activity.class.ActivityReference,
srcDocClass: 'contact:class:Employee'
},
{ $set: { srcDocClass: contact.mixin.Employee } }
{ srcDocClass: contact.mixin.Employee }
)
await client.update(
'tags' as Domain,
{ targetClass: 'contact:class:Employee' },
{ $set: { targetClass: contact.mixin.Employee } }
{ targetClass: contact.mixin.Employee }
)
await client.update(
DOMAIN_VIEW,
{ filterClass: 'contact:class:Employee' },
{ $set: { filterClass: contact.mixin.Employee } }
{ filterClass: contact.mixin.Employee }
)
await client.update(
DOMAIN_CONTACT,
@ -260,9 +260,7 @@ export const contactOperation: MigrateOperation = {
displayName: `${contact.mixin.Employee as string}.displayName`,
position: `${contact.mixin.Employee as string}.position`
},
$set: {
_class: contact.class.Person
}
_class: contact.class.Person
}
)
}

View File

@ -270,9 +270,7 @@ async function migrateSpaceTypes (client: MigrationClient): Promise<void> {
'attributes.descriptor': documents.descriptor.DocumentSpaceType
},
{
$set: {
objectClass: documents.class.DocumentSpaceType
}
objectClass: documents.class.DocumentSpaceType
}
)
}
@ -398,7 +396,7 @@ async function migrateProjectMetaRank (client: MigrationClient): Promise<void> {
for (const doc of projectMeta) {
operations.push({
filter: { _id: doc._id },
update: { $set: { rank } }
update: { rank }
})
rank = makeRank(rank, undefined)
}

View File

@ -113,10 +113,8 @@ async function migrateAllSpaceToTyped (client: MigrationClient): Promise<void> {
_class: core.class.Space
},
{
$set: {
_class: core.class.TypedSpace,
type: core.spaceType.SpacesType
}
_class: core.class.TypedSpace,
type: core.spaceType.SpacesType
}
)
}
@ -135,9 +133,7 @@ async function migrateSpacesOwner (client: MigrationClient): Promise<void> {
_id: space._id
},
{
$set: {
owners: [space.createdBy]
}
owners: [space.createdBy]
}
)
}
@ -669,7 +665,7 @@ export const coreOperation: MigrateOperation = {
func: async (client: MigrationClient): Promise<void> => {
const now = Date.now().toString(16)
for (const d of client.hierarchy.domains()) {
await client.update(d, { '%hash%': { $in: [null, ''] } }, { $set: { '%hash%': now } })
await client.update(d, { '%hash%': { $in: [null, ''] } }, { '%hash%': now })
}
}
},

View File

@ -77,9 +77,7 @@ async function migrateTeamspaces (client: MigrationClient): Promise<void> {
type: { $exists: false }
},
{
$set: {
type: document.spaceType.DefaultTeamspaceType
}
type: document.spaceType.DefaultTeamspaceType
}
)
}
@ -95,9 +93,7 @@ async function migrateTeamspacesMixins (client: MigrationClient): Promise<void>
'attributes.attributeOf': oldSpaceTypeMixin
},
{
$set: {
'attributes.attributeOf': newSpaceTypeMixin
}
'attributes.attributeOf': newSpaceTypeMixin
}
)
@ -131,7 +127,7 @@ async function migrateRank (client: MigrationClient): Promise<void> {
for (const doc of documents) {
operations.push({
filter: { _id: doc._id },
update: { $set: { rank } }
update: { rank }
})
rank = makeRank(rank, undefined)
}

View File

@ -73,11 +73,9 @@ async function migrateFileVersions (client: MigrationClient): Promise<void> {
_class: file._class
},
{
$set: {
version: 1,
versions: 1,
file: fileVersionId
},
version: 1,
versions: 1,
file: fileVersionId,
$unset: {
metadata: 1
}

View File

@ -116,9 +116,7 @@ async function migrateDefaultTypeMixins (client: MigrationClient): Promise<void>
'attributes.attributeOf': oldSpaceTypeMixin
},
{
$set: {
'attributes.attributeOf': newSpaceTypeMixin
}
'attributes.attributeOf': newSpaceTypeMixin
}
)

View File

@ -155,9 +155,7 @@ async function migrateDefaultTypeMixins (client: MigrationClient): Promise<void>
'attributes.attributeOf': oldSpaceTypeMixin
},
{
$set: {
'attributes.attributeOf': newSpaceTypeMixin
}
'attributes.attributeOf': newSpaceTypeMixin
}
)

View File

@ -138,9 +138,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
DOMAIN_MODEL_TX,
{ _id: defaultTaskType._id },
{
$set: {
modifiedBy: core.account.System
}
modifiedBy: core.account.System
}
)
@ -148,9 +146,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
DOMAIN_MODEL_TX,
{ _id: defaultType._id },
{
$set: {
modifiedBy: core.account.System
}
modifiedBy: core.account.System
}
)
} else if (defaultTaskType?.modifiedBy !== core.account.System) {
@ -178,10 +174,8 @@ export async function migrateDefaultStatusesBase<T extends Task> (
DOMAIN_MODEL_TX,
{ _id: defaultType._id },
{
$set: {
'attributes.name': defaultType.attributes.name + ' (custom)',
objectId: newId
}
'attributes.name': defaultType.attributes.name + ' (custom)',
objectId: newId
}
)
await client.update(
@ -191,9 +185,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
objectSpace: core.space.Model
},
{
$set: {
objectId: newId
}
objectId: newId
}
)
await client.update(
@ -204,9 +196,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
'attributes.parent': defaultTypeId
},
{
$set: {
'attributes.parent': newId
}
'attributes.parent': newId
}
)
await client.update(
@ -216,7 +206,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
type: defaultTypeId
},
{
$set: { type: newId }
type: newId
}
)
}
@ -315,7 +305,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
}
counter++
await client.update(DOMAIN_MODEL_TX, { _id: ptsCreate._id }, { $set: { 'attributes.statuses': newUpdateStatuses } })
await client.update(DOMAIN_MODEL_TX, { _id: ptsCreate._id }, { 'attributes.statuses': newUpdateStatuses })
}
logger.log('projectTypeStatusesCreates updated: ', counter)
@ -337,7 +327,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
}
counter++
await client.update(DOMAIN_MODEL_TX, { _id: ptsUpdate._id }, { $set: { 'operations.statuses': newUpdateStatuses } })
await client.update(DOMAIN_MODEL_TX, { _id: ptsUpdate._id }, { 'operations.statuses': newUpdateStatuses })
}
logger.log('projectTypeStatusesUpdates updated: ', counter)
@ -365,11 +355,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
}
counter++
await client.update(
DOMAIN_MODEL_TX,
{ _id: ptsUpdate._id },
{ $set: { 'operations.$push.statuses': newPushStatus } }
)
await client.update(DOMAIN_MODEL_TX, { _id: ptsUpdate._id }, { 'operations.$push.statuses': newPushStatus })
}
logger.log('projectTypeStatusesPushes updated: ', counter)
@ -394,11 +380,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
}
counter++
await client.update(
DOMAIN_MODEL_TX,
{ _id: taskType._id },
{ $set: { 'attributes.statuses': newTaskTypeStatuses } }
)
await client.update(DOMAIN_MODEL_TX, { _id: taskType._id }, { 'attributes.statuses': newTaskTypeStatuses })
}
logger.log('allTaskTypes updated: ', counter)
@ -423,11 +405,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
}
counter++
await client.update(
DOMAIN_MODEL_TX,
{ _id: ttsUpdate._id },
{ $set: { 'operations.statuses': newTaskTypeUpdateStatuses } }
)
await client.update(DOMAIN_MODEL_TX, { _id: ttsUpdate._id }, { 'operations.statuses': newTaskTypeUpdateStatuses })
}
logger.log('allTaskTypeStatusesUpdates updated: ', counter)
@ -452,7 +430,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
if (newStatus !== baseTask.status) {
counter++
await client.update(DOMAIN_TASK, { _id: baseTask._id }, { $set: { status: newStatus } })
await client.update(DOMAIN_TASK, { _id: baseTask._id }, { status: newStatus })
}
}
logger.log('affectedBaseTasks updated: ', counter)
@ -474,11 +452,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
if (statusSet !== newStatusSet) {
counter++
await client.update(
DOMAIN_ACTIVITY,
{ _id: updateMessage._id },
{ $set: { 'attributeUpdates.set.0': newStatusSet } }
)
await client.update(DOMAIN_ACTIVITY, { _id: updateMessage._id }, { 'attributeUpdates.set.0': newStatusSet })
}
}
logger.log('Base task update messages updated: ', counter)
@ -490,7 +464,7 @@ export async function migrateDefaultStatusesBase<T extends Task> (
logger.log('Updating status from ' + statusIdBeingMigrated + ' to ' + newStatus, '')
await client.update(DOMAIN_STATUS, { _id: statusIdBeingMigrated }, { $set: { __superseded: true } })
await client.update(DOMAIN_STATUS, { _id: statusIdBeingMigrated }, { __superseded: true })
if (!createdStatuses.has(newStatus)) {
const oldStatus = oldStatuses.find((s) => s._id === statusIdBeingMigrated)
@ -554,12 +528,12 @@ export const taskOperation: MigrateOperation = {
await client.update(
DOMAIN_TX,
{ objectId: { $in: missing }, objectSpace: 'task:space:Statuses' },
{ $set: { objectSpace: core.space.Model } }
{ objectSpace: core.space.Model }
)
await client.update(
DOMAIN_MODEL_TX,
{ objectId: { $in: missing }, objectSpace: 'task:space:Statuses' },
{ $set: { objectSpace: core.space.Model } }
{ objectSpace: core.space.Model }
)
await client.move(DOMAIN_TX, { objectId: { $in: missing }, objectSpace: core.space.Model }, DOMAIN_MODEL_TX)
}

View File

@ -152,7 +152,7 @@ async function passIdentifierToParentInfo (client: MigrationClient): Promise<voi
if (p === undefined) continue
parent.identifier = p.identifier
}
await client.update(DOMAIN_TASK, { _id: issue._id }, { $set: { parents: issue.parents } })
await client.update(DOMAIN_TASK, { _id: issue._id }, { parents: issue.parents })
}
}
@ -166,7 +166,7 @@ async function migrateIdentifiers (client: MigrationClient): Promise<void> {
const project = projectsMap.get(issue.space)
if (project === undefined) continue
const identifier = project.identifier + '-' + issue.number
await client.update(DOMAIN_TASK, { _id: issue._id }, { $set: { identifier } })
await client.update(DOMAIN_TASK, { _id: issue._id }, { identifier })
}
}
@ -203,7 +203,7 @@ async function migrateDefaultStatuses (client: MigrationClient, logger: ModelLog
const newDefaultIssueStatus = getNewStatus(project.defaultIssueStatus)
if (project.defaultIssueStatus !== newDefaultIssueStatus) {
await client.update(DOMAIN_SPACE, { _id: project._id }, { $set: { defaultIssueStatus: newDefaultIssueStatus } })
await client.update(DOMAIN_SPACE, { _id: project._id }, { defaultIssueStatus: newDefaultIssueStatus })
}
const projectUpdateMessages = await client.find<DocUpdateMessage>(DOMAIN_ACTIVITY, {
@ -218,11 +218,7 @@ async function migrateDefaultStatuses (client: MigrationClient, logger: ModelLog
const newStatusSet = statusSet != null ? getNewStatus(statusSet as Ref<Status>) : statusSet
if (statusSet !== newStatusSet) {
await client.update(
DOMAIN_ACTIVITY,
{ _id: updateMessage._id },
{ $set: { 'attributeUpdates.set.0': newStatusSet } }
)
await client.update(DOMAIN_ACTIVITY, { _id: updateMessage._id }, { 'attributeUpdates.set.0': newStatusSet })
}
}
}
@ -300,9 +296,7 @@ async function migrateDefaultTypeMixins (client: MigrationClient): Promise<void>
'attributes.attributeOf': oldSpaceTypeMixin
},
{
$set: {
'attributes.attributeOf': newSpaceTypeMixin
}
'attributes.attributeOf': newSpaceTypeMixin
}
)
@ -342,9 +336,7 @@ async function migrateIssueStatuses (client: MigrationClient): Promise<void> {
'attributes.statusClass': core.class.Status
},
{
$set: {
'attributes.statusClass': tracker.class.IssueStatus
}
'attributes.statusClass': tracker.class.IssueStatus
}
)
await client.update(
@ -354,9 +346,7 @@ async function migrateIssueStatuses (client: MigrationClient): Promise<void> {
'attributes.ofAttribute': tracker.attribute.IssueStatus
},
{
$set: {
objectClass: tracker.class.IssueStatus
}
objectClass: tracker.class.IssueStatus
}
)
@ -367,9 +357,7 @@ async function migrateIssueStatuses (client: MigrationClient): Promise<void> {
ofAttribute: tracker.attribute.IssueStatus
},
{
$set: {
_class: tracker.class.IssueStatus
}
_class: tracker.class.IssueStatus
}
)
}

View File

@ -16,7 +16,6 @@
import { type ClientSocketFactory } from '@hcengineering/client'
import {
CollaborativeDoc,
type Account,
type AttachedData,
type AttachedDoc,
type Class,

View File

@ -1,3 +1,4 @@
import { AccountClient } from '@hcengineering/account-client'
import { Analytics } from '@hcengineering/analytics'
import core, {
Class,
@ -10,11 +11,9 @@ import core, {
Domain,
FindOptions,
Hierarchy,
IncOptions,
MigrationState,
ModelDb,
ObjQueryType,
PushOptions,
Rank,
Ref,
SortingOrder,
@ -25,18 +24,13 @@ import core, {
generateId
} from '@hcengineering/core'
import { makeRank } from '@hcengineering/rank'
import { AccountClient } from '@hcengineering/account-client'
import { StorageAdapter } from '@hcengineering/storage'
import { ModelLogger } from './utils'
/**
* @public
*/
export type MigrateUpdate<T extends Doc> = Partial<T> &
PushOptions<T> &
IncOptions<T> &
UnsetOptions &
Record<string, any>
export type MigrateUpdate<T extends Doc> = Partial<T> & UnsetOptions & Record<string, any>
/**
* @public

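The MigrateUpdate simplification above is what drives most of the migration edits in this commit: update payloads are no longer wrapped in a Mongo-style $set, while $unset remains available through UnsetOptions. A minimal before/after sketch of the call shape, reusing a pair of calls that appear verbatim in the tracker migration further down (client is the MigrationClient passed to a MigrateOperation, and issue/identifier come from that migration's surrounding loop):

// Before this change, migration updates were written with a $set wrapper:
await client.update(DOMAIN_TASK, { _id: issue._id }, { $set: { identifier } })
// After it, MigrateUpdate<T> is Partial<T> & UnsetOptions & Record<string, any>,
// so fields are passed at the top level and only $unset keeps its operator form:
await client.update(DOMAIN_TASK, { _id: issue._id }, { identifier })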
View File

@ -17,7 +17,12 @@
"dom"
],
"incremental": true,
"types": ["jest"],
"types": [
"jest"
],
"isolatedModules": true
}
},
"exclude": [
"node_modules/**"
]
}

View File

@ -745,9 +745,9 @@ export function decodeTokenPayload (token: string): any {
}
export function isAdminUser (): boolean {
// TODO: fixme
return false
// return decodeTokenPayload(getMetadata(plugin.metadata.Token) ?? '').admin === 'true'
const decodedToken = decodeTokenPayload(getMetadata(plugin.metadata.Token) ?? '')
console.log('decodedToken', decodedToken)
return decodedToken.extra?.admin === 'true'
}
export function isSpace (space: Doc): space is Space {

View File

@ -9,7 +9,7 @@
isUpgradingMode,
reduceCalls,
versionToString,
type BaseWorkspaceInfo
type WorkspaceInfoWithStatus
} from '@hcengineering/core'
import { getEmbeddedLabel } from '@hcengineering/platform'
import { isAdminUser, MessageBox } from '@hcengineering/presentation'
@ -30,7 +30,8 @@
ticker
} from '@hcengineering/ui'
import { workbenchId } from '@hcengineering/workbench'
import { getAllWorkspaces, getRegionInfo, performWorkspaceOperation, type RegionInfo } from '../utils'
import { getAllWorkspaces, getRegionInfo, performWorkspaceOperation } from '../utils'
import { RegionInfo } from '@hcengineering/account-client'
$: now = $ticker
@ -43,7 +44,7 @@
window.open(url, '_blank')
}
type WorkspaceInfo = BaseWorkspaceInfo & { attempts: number }
type WorkspaceInfo = WorkspaceInfoWithStatus & { attempts: number }
let workspaces: WorkspaceInfo[] = []
@ -73,9 +74,9 @@
$: sortedWorkspaces = workspaces
.filter(
(it) =>
((it.workspaceName?.includes(search) ?? false) ||
(it.workspaceUrl?.includes(search) ?? false) ||
it.workspace?.includes(search) ||
((it.name?.includes(search) ?? false) ||
(it.url?.includes(search) ?? false) ||
it.uuid?.includes(search) ||
it.createdBy?.includes(search)) &&
((showActive && isActiveMode(it.mode)) ||
(showArchived && isArchivingMode(it.mode)) ||
@ -92,7 +93,7 @@
case SortingRule.LastVisit:
return (b.lastVisit ?? 0) - (a.lastVisit ?? 0)
}
return (b.workspaceUrl ?? b.workspace).localeCompare(a.workspaceUrl ?? a.workspace)
return (b.url ?? b.uuid).localeCompare(a.url ?? a.uuid)
})
let backupIdx = new Map<string, number>()
@ -156,7 +157,7 @@
backupable = mixedBackupSorting
for (const [idx, it] of mixedBackupSorting.entries()) {
newBackupIdx.set(it.workspace, idx)
newBackupIdx.set(it.uuid, idx)
}
backupIdx = newBackupIdx
}
@ -184,7 +185,7 @@
let showOther: boolean = true
$: groupped = groupByArray(sortedWorkspaces, (it) => {
const lastUsageDays = Math.round((now - it.lastVisit) / (1000 * 3600 * 24))
const lastUsageDays = Math.round((now - (it.lastVisit ?? 0)) / (1000 * 3600 * 24))
return Object.entries(dayRanges).find(([_k, v]) => lastUsageDays <= v)?.[0] ?? 'Other'
})
@ -202,10 +203,10 @@
$: byVersion = groupByArray(
workspaces.filter((it) => {
const lastUsed = Math.round((now - it.lastVisit) / (1000 * 3600 * 24))
const lastUsed = Math.round((now - (it.lastVisit ?? 0)) / (1000 * 3600 * 24))
return isActiveMode(it.mode) && lastUsed < 1
}),
(it) => versionToString(it.version ?? { major: 0, minor: 0, patch: 0 })
(it) => versionToString({ major: it.versionMajor, minor: it.versionMinor, patch: it.versionPatch })
)
let superAdminMode = false
@ -334,7 +335,7 @@
message: getEmbeddedLabel(`Please confirm archive ${archivedV.length} workspaces`),
action: async () => {
void performWorkspaceOperation(
archivedV.map((it) => it.workspace),
archivedV.map((it) => it.uuid),
'archive'
)
}
@ -354,7 +355,7 @@
message: getEmbeddedLabel(`Please confirm migrate ${archivedV.length} workspaces`),
action: async () => {
await performWorkspaceOperation(
activeV.map((it) => it.workspace),
activeV.map((it) => it.uuid),
'migrate-to',
selectedRegionId
)
@ -365,9 +366,9 @@
{/if}
</svelte:fragment>
{#each v.slice(0, limit) as workspace}
{@const wsName = workspace.workspaceName ?? workspace.workspace}
{@const lastUsageDays = Math.round((now - workspace.lastVisit) / (1000 * 3600 * 24))}
{@const bIdx = backupIdx.get(workspace.workspace)}
{@const wsName = workspace.name}
{@const lastUsageDays = Math.round((now - (workspace.lastVisit ?? 0)) / (1000 * 3600 * 24))}
{@const bIdx = backupIdx.get(workspace.uuid)}
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div class="flex fs-title cursor-pointer focused-button bordered">
@ -375,11 +376,7 @@
<span class="label overflow-label flex-row-center" style:width={'12rem'}>
{wsName}
<div class="ml-1">
<Button
icon={IconOpen}
size={'small'}
on:click={() => select(workspace.workspaceUrl ?? workspace.workspace)}
/>
<Button icon={IconOpen} size={'small'} on:click={() => select(workspace.url)} />
</div>
</span>
<div class="ml-1" style:width={'12rem'}>
@ -400,12 +397,9 @@
{workspace.attempts}
</span>
<!-- <span class="flex flex-between select-text overflow-label" style:width={'25rem'}>
{workspace.workspace}
</span> -->
<span class="flex flex-between" style:width={'5rem'}>
{#if workspace.progress !== 100 && workspace.progress !== 0}
({workspace.progress}%)
{#if workspace.processingProgress !== 100 && workspace.processingProgress !== 0}
({workspace.processingProgress}%)
{/if}
</span>
<span class="flex flex-between" style:width={'5rem'}>
@ -447,10 +441,10 @@
kind={'ghost'}
on:click={() => {
showPopup(MessageBox, {
label: getEmbeddedLabel(`Archive ${workspace.workspaceUrl}`),
label: getEmbeddedLabel(`Archive ${workspace.url}`),
message: getEmbeddedLabel('Please confirm'),
action: async () => {
await performWorkspaceOperation(workspace.workspace, 'archive')
await performWorkspaceOperation(workspace.uuid, 'archive')
}
})
}}
@ -465,10 +459,10 @@
label={getEmbeddedLabel('Unarchive')}
on:click={() => {
showPopup(MessageBox, {
label: getEmbeddedLabel(`Unarchive ${workspace.workspaceUrl}`),
label: getEmbeddedLabel(`Unarchive ${workspace.url}`),
message: getEmbeddedLabel('Please confirm'),
action: async () => {
await performWorkspaceOperation(workspace.workspace, 'unarchive')
await performWorkspaceOperation(workspace.uuid, 'unarchive')
}
})
}}
@ -483,10 +477,10 @@
label={getEmbeddedLabel('Migrate ' + (selectedRegionName ?? ''))}
on:click={() => {
showPopup(MessageBox, {
label: getEmbeddedLabel(`Migrate ${workspace.workspaceUrl}`),
label: getEmbeddedLabel(`Migrate ${workspace.url}`),
message: getEmbeddedLabel('Please confirm'),
action: async () => {
await performWorkspaceOperation(workspace.workspace, 'migrate-to', selectedRegionId)
await performWorkspaceOperation(workspace.uuid, 'migrate-to', selectedRegionId)
}
})
}}
@ -501,10 +495,10 @@
label={getEmbeddedLabel('Delete')}
on:click={() => {
showPopup(MessageBox, {
label: getEmbeddedLabel(`Delete ${workspace.workspaceUrl}`),
label: getEmbeddedLabel(`Delete ${workspace.url}`),
message: getEmbeddedLabel('Please confirm'),
action: async () => {
await performWorkspaceOperation(workspace.workspace, 'delete')
await performWorkspaceOperation(workspace.uuid, 'delete')
}
})
}}

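Most of the edits in this Svelte component are a mechanical rename driven by switching the row type from BaseWorkspaceInfo to WorkspaceInfoWithStatus. A hypothetical adapter, not part of the commit, only to spell out the field mapping the hunks above imply (optionality is guessed from the new ?? and ?. guards):

import { type WorkspaceInfoWithStatus } from '@hcengineering/core'

// workspace -> uuid, workspaceUrl -> url, workspaceName -> name,
// progress -> processingProgress, version -> versionMajor/versionMinor/versionPatch
function toLegacyShape (it: WorkspaceInfoWithStatus): {
  workspace: string
  workspaceUrl?: string
  workspaceName?: string
  lastVisit?: number
} {
  return { workspace: it.uuid, workspaceUrl: it.url, workspaceName: it.name, lastVisit: it.lastVisit }
}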
View File

@ -40,6 +40,7 @@ async function toResponse (compression: string, data: any, response: http.Server
.writeHead(200, {
'content-type': 'application/json',
compression: 'snappy',
'content-encoding': 'snappy',
'keep-alive': 'timeout=5'
})
.end(await compress(JSON.stringify(data)))
@ -80,7 +81,7 @@ async function handleSQLFind (
const qid = ++queryId
try {
const lq = (json.query as string).toLowerCase()
if (lq.includes('begin') || lq.includes('commit') || lq.includes('rollback')) {
if (filterInappropriateQueries(lq)) {
console.error('not allowed', json.query)
response.writeHead(403).end('Not allowed')
return
@ -128,3 +129,7 @@ const reqHandler = (req: http.IncomingMessage, resp: http.ServerResponse): void
}
http.createServer(reqHandler).listen(port)
function filterInappropriateQueries (lq: string): boolean {
const harmfulPatterns = ['begin', 'commit', 'rollback', 'drop', 'alter', 'truncate']
return harmfulPatterns.some((pattern) => lq.includes(pattern))
}
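A couple of jest-style spot checks for the new guard, assuming the function were exported for testing (the caller in handleSQLFind lower-cases the query before passing it in):

expect(filterInappropriateQueries('select * from tx where "workspaceId" = $1')).toBe(false) // allowed
expect(filterInappropriateQueries('drop table tx')).toBe(true) // rejected with 403
// Note that includes() is a plain substring match, so a query that merely mentions a
// word such as "alteration" would also be rejected.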

View File

@ -13,10 +13,8 @@
// limitations under the License.
//
import aiBot from '@hcengineering/ai-bot'
import analyticsCollector from '@hcengineering/analytics-collector'
import chunter, { ChatMessage, ThreadMessage } from '@hcengineering/chunter'
import core, { AttachedDoc, Tx, TxCreateDoc, TxCUD, TxProcessor } from '@hcengineering/core'
import { ChatMessage } from '@hcengineering/chunter'
import { AttachedDoc, Tx, TxCreateDoc, TxCUD } from '@hcengineering/core'
import { ActivityInboxNotification, MentionInboxNotification } from '@hcengineering/notification'
import { TriggerControl } from '@hcengineering/server-core'
@ -133,78 +131,61 @@ import { TriggerControl } from '@hcengineering/server-core'
// }
// }
// eslint-disable-next-line @typescript-eslint/no-unused-vars
async function onBotDirectMessageSend (control: TriggerControl, message: ChatMessage): Promise<void> {
// TODO: FIXME
throw new Error('Not implemented')
// const account = control.modelDb.findAllSync(contact.class.PersonAccount, {
// _id: (message.createdBy ?? message.modifiedBy) as PersonId
// })[0]
// if (account === undefined) {
// return
// }
// const direct = (await getMessageDoc(message, control)) as DirectMessage
// if (direct === undefined) {
// return
// }
// const isAvailable = await isDirectAvailable(direct, control)
// if (!isAvailable) {
// return
// }
// let messageEvent: AIMessageEventRequest
// if (control.hierarchy.isDerived(message._class, chunter.class.ThreadMessage)) {
// messageEvent = getThreadMessageData(message as ThreadMessage, account.email)
// } else {
// messageEvent = getMessageData(direct, message, account.email)
// }
// const transferEvent = await createTransferEvent(control, message, account, messageEvent)
// const events = transferEvent !== undefined ? [messageEvent, transferEvent] : [messageEvent]
// await sendAIEvents(events, control.workspace.uuid, control.ctx)
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
async function onSupportWorkspaceMessage (control: TriggerControl, message: ChatMessage): Promise<void> {
// TODO: FIXME
throw new Error('Not implemented')
// const supportWorkspaceId = getSupportWorkspaceId()
// if (supportWorkspaceId === undefined) {
// return
// }
// if (control.workspace.uuid !== supportWorkspaceId) {
// return
// }
// if (!control.hierarchy.isDerived(message.attachedToClass, analyticsCollector.class.OnboardingChannel)) {
// return
// }
// const channel = (await getMessageDoc(message, control)) as OnboardingChannel
// if (channel === undefined) {
// return
// }
// const { workspaceId, email } = channel
// const account = control.modelDb.findAllSync(contact.class.PersonAccount, {
// _id: (message.createdBy ?? message.modifiedBy) as PersonId
// })[0]
// let data: AIMessageEventRequest
// if (control.hierarchy.isDerived(message._class, chunter.class.ThreadMessage)) {
// data = getThreadMessageData(message as ThreadMessage, account.email)
// } else {
// data = getMessageData(channel, message, account.email)
// }
// const transferEvent: AITransferEventRequest = {
// type: AIEventType.Transfer,
// createdOn: data.createdOn,
@ -219,39 +200,39 @@ async function onSupportWorkspaceMessage (control: TriggerControl, message: Chat
// messageId: message._id,
// parentMessageId: await getThreadParent(control, message)
// }
// await sendAIEvents([transferEvent], control.workspace.uuid, control.ctx)
}
export async function OnMessageSend (originTxs: TxCUD<AttachedDoc>[], control: TriggerControl): Promise<Tx[]> {
const { hierarchy } = control
const txes = originTxs.filter(
(it) =>
it._class === core.class.TxCreateDoc &&
hierarchy.isDerived(it.objectClass, chunter.class.ChatMessage) &&
!(it.modifiedBy === aiBot.account.AIBot || it.modifiedBy === core.account.System)
)
if (txes.length === 0) {
return []
}
for (const tx of txes) {
const isThread = hierarchy.isDerived(tx.objectClass, chunter.class.ThreadMessage)
const message = TxProcessor.createDoc2Doc(tx as TxCreateDoc<ChatMessage>)
const docClass = isThread ? (message as ThreadMessage).objectClass : message.attachedToClass
if (!hierarchy.isDerived(docClass, chunter.class.ChunterSpace)) {
continue
}
if (docClass === chunter.class.DirectMessage) {
await onBotDirectMessageSend(control, message)
}
if (docClass === analyticsCollector.class.OnboardingChannel) {
await onSupportWorkspaceMessage(control, message)
}
}
// TODO: FIXME
// const { hierarchy } = control
// const txes = originTxs.filter(
// (it) =>
// it._class === core.class.TxCreateDoc &&
// hierarchy.isDerived(it.objectClass, chunter.class.ChatMessage) &&
// !(it.modifiedBy === aiBot.account.AIBot || it.modifiedBy === core.account.System)
// )
// if (txes.length === 0) {
// return []
// }
// for (const tx of txes) {
// const isThread = hierarchy.isDerived(tx.objectClass, chunter.class.ThreadMessage)
// const message = TxProcessor.createDoc2Doc(tx as TxCreateDoc<ChatMessage>)
//
// const docClass = isThread ? (message as ThreadMessage).objectClass : message.attachedToClass
//
// if (!hierarchy.isDerived(docClass, chunter.class.ChunterSpace)) {
// continue
// }
//
// if (docClass === chunter.class.DirectMessage) {
// await onBotDirectMessageSend(control, message)
// }
//
// if (docClass === analyticsCollector.class.OnboardingChannel) {
// await onSupportWorkspaceMessage(control, message)
// }
// }
return []
}

View File

@ -15,7 +15,7 @@
import { TriggerControl } from '@hcengineering/server-core'
import contact, { Employee, type Person } from '@hcengineering/contact'
import { PersonId, toIdMap, parseSocialIdString, type Ref } from '@hcengineering/core'
import { parseSocialIdString, PersonId, type Ref, toIdMap } from '@hcengineering/core'
export async function getTriggerCurrentPerson (control: TriggerControl): Promise<Person | undefined> {
const { type, value } = parseSocialIdString(control.txFactory.account)
@ -79,9 +79,13 @@ export async function getAllSocialStringsByPersonId (
export async function getPerson (control: TriggerControl, personId: PersonId): Promise<Person | undefined> {
const socialId = (await control.findAll(control.ctx, contact.class.SocialIdentity, { key: personId }))[0]
const person = (await control.findAll(control.ctx, contact.class.Person, { _id: socialId.attachedTo }))[0]
return person
if (socialId === undefined) {
control.ctx.error('Cannot find social id', { key: personId })
return undefined
}
return (await control.findAll(control.ctx, contact.class.Person, { _id: socialId.attachedTo }))[0]
}
export async function getPersons (control: TriggerControl, personIds: PersonId[]): Promise<Person[]> {

View File

@ -147,16 +147,12 @@ async function notifyByEmail (
message?: ActivityMessage
): Promise<void> {
// TODO: FIXME
throw new Error('Not implemented')
// const account = receiver.account
// if (account === undefined) {
// return
// }
// const senderPerson = sender.person
// const senderName = senderPerson !== undefined ? formatName(senderPerson.name, control.branding?.lastNameFirst) : ''
// const content = await getContentByTemplate(doc, senderName, type, control, '', data, message)
// if (content !== undefined) {
// await sendEmailNotification(control.ctx, content.text, content.html, content.subject, account.email)
@ -173,7 +169,6 @@ const SendEmailNotifications: NotificationProviderFunc = async (
message?: ActivityMessage
): Promise<Tx[]> => {
// TODO: FIXME
throw new Error('Not implemented')
// if (types.length === 0) {
// return []
// }
@ -190,7 +185,7 @@ const SendEmailNotifications: NotificationProviderFunc = async (
// await notifyByEmail(control, type._id, object, sender, receiver, data, message)
// }
// return []
return []
}
// eslint-disable-next-line @typescript-eslint/explicit-function-return-type

View File

@ -45,7 +45,6 @@ export async function getValue (control: TriggerControl, context: Record<string,
async function getEmployee (control: TriggerControl, _id: PersonId): Promise<Person | undefined> {
// TODO: FIXME
// Related to integrations
throw new Error('Not implemented')
// const employeeAccount = control.modelDb.findAllSync(contact.class.PersonAccount, {
// _id: _id as PersonId
// })[0]
@ -57,6 +56,7 @@ async function getEmployee (control: TriggerControl, _id: PersonId): Promise<Per
// )[0]
// return employee
// }
return undefined
}
export async function getOwnerFirstName (

View File

@ -114,7 +114,6 @@ export async function GetCurrentEmployeeTG (
context: Record<string, Doc>
): Promise<string | undefined> {
// TODO: FIXME
throw new Error('Not implemented')
// const account = await control.modelDb.findOne(contact.class.PersonAccount, {
// _id: control.txFactory.account as PersonId
// })
@ -125,6 +124,8 @@ export async function GetCurrentEmployeeTG (
// if (employee !== undefined) {
// return await getContactChannel(control, employee, contact.channelProvider.Telegram)
// }
return undefined
}
export async function GetIntegrationOwnerTG (
@ -132,7 +133,6 @@ export async function GetIntegrationOwnerTG (
context: Record<string, Doc>
): Promise<string | undefined> {
// TODO: FIXME
throw new Error('Not implemented')
// const value = context[setting.class.Integration] as Integration
// if (value === undefined) return
// const account = await control.modelDb.findOne(contact.class.PersonAccount, {
@ -145,6 +145,8 @@ export async function GetIntegrationOwnerTG (
// if (employee !== undefined) {
// return await getContactChannel(control, employee, contact.channelProvider.Telegram)
// }
return undefined
}
async function getContactChannel (
@ -261,7 +263,6 @@ const SendTelegramNotifications: NotificationProviderFunc = async (
message?: ActivityMessage
): Promise<Tx[]> => {
// TODO: FIXME
throw new Error('Not implemented')
// if (types.length === 0) {
// return []
// }
@ -312,7 +313,7 @@ const SendTelegramNotifications: NotificationProviderFunc = async (
// })
// }
// return []
return []
}
// eslint-disable-next-line @typescript-eslint/explicit-function-return-type

View File

@ -37,7 +37,14 @@ import { getSocialStrings } from '@hcengineering/server-contact'
import serverCore, { TriggerControl } from '@hcengineering/server-core'
import { NOTIFICATION_BODY_SIZE } from '@hcengineering/server-notification'
import { stripTags } from '@hcengineering/text'
import tracker, { Component, Issue, IssueParentInfo, TimeSpendReport, trackerId, type Project } from '@hcengineering/tracker'
import tracker, {
Component,
Issue,
IssueParentInfo,
TimeSpendReport,
trackerId,
type Project
} from '@hcengineering/tracker'
import { workbenchId } from '@hcengineering/workbench'
async function updateSubIssues (

View File

@ -272,7 +272,10 @@ export class WorkspaceStatusMongoDbCollection implements DbCollection<WorkspaceS
}
async find (query: Query<WorkspaceStatus>, sort?: Sort<WorkspaceStatus>, limit?: number): Promise<WorkspaceStatus[]> {
return (await this.wsCollection.find(this.toWsQuery(query), this.toWsSort(sort), limit)).map((ws) => ws.status)
return (await this.wsCollection.find(this.toWsQuery(query), this.toWsSort(sort), limit)).map((ws) => ({
...ws.status,
workspaceUuid: ws.uuid
}))
}
async findOne (query: Query<WorkspaceStatus>): Promise<WorkspaceStatus | null> {

View File

@ -91,6 +91,8 @@ import {
// Move to config?
const processingTimeoutMs = 30 * 1000
const ADMIN_EMAILS = new Set(process.env.ADMIN_EMAILS?.split(',') ?? [])
/* =================================== */
/* ============OPERATIONS============= */
/* =================================== */
@ -126,11 +128,12 @@ export async function login (
const isConfirmed = emailSocialId.verifiedOn != null
ctx.info('Login succeeded', { email, normalizedEmail, isConfirmed, emailSocialId })
const isAdmin: Record<string, string> = ADMIN_EMAILS.has(email.trim()) ? { admin: 'true' } : {}
ctx.info('Login succeeded', { email, normalizedEmail, isConfirmed, emailSocialId, ...isAdmin })
return {
account: existingAccount.uuid,
token: isConfirmed ? generateToken(existingAccount.uuid) : undefined
token: isConfirmed ? generateToken(existingAccount.uuid, undefined, isAdmin) : undefined
}
} catch (err: any) {
Analytics.handleError(err)
@ -835,7 +838,7 @@ export async function listWorkspaces (
): Promise<WorkspaceInfoWithStatus[]> {
const { extra } = decodeTokenVerbose(ctx, token)
if (!['tool', 'backup', 'admin'].includes(extra?.service)) {
if (!['tool', 'backup', 'admin'].includes(extra?.service) && extra?.admin !== 'true') {
throw new PlatformError(new Status(Severity.ERROR, platform.status.Forbidden, {}))
}

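Paired with the isAdminUser change in the presentation package earlier in this diff, the admin flag now travels inside the token's extra payload. A rough sketch of the round trip, with the email and accountUuid purely illustrative, and assuming generateToken stores its third argument under extra, which is what isAdminUser reads back:

// Account service side: emails listed in ADMIN_EMAILS get extra.admin = 'true'.
const extra: Record<string, string> = ADMIN_EMAILS.has('ops@example.com') ? { admin: 'true' } : {}
const token = generateToken(accountUuid, undefined, extra) // accountUuid: the account's uuid, as in login()

// Presentation side (isAdminUser): the flag is read back from the decoded payload.
const decoded = decodeTokenPayload(token)
const isAdmin = decoded.extra?.admin === 'true'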
View File

@ -16,7 +16,6 @@
import {
type WorkspaceIds,
type Class,
type Data,
type Doc,
type DocumentQuery,
type Domain,
@ -86,13 +85,6 @@ export interface DbAdapter extends LowLevelStorage {
tx: (ctx: MeasureContext, ...tx: Tx[]) => Promise<TxResult[]>
// Bulk update operations
update: <T extends Doc>(
ctx: MeasureContext,
domain: Domain,
operations: Map<Ref<Doc>, Partial<Data<T>>>
) => Promise<void>
// Allow to register a handler to listen for domain operations
on?: (handler: DbAdapterHandler) => void
}

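With the Map-based bulk update removed from the DbAdapter contract above, callers in this commit switch to the rawUpdate entry point (see the FullTextIndexPipeline and Postgres hunks further down). A rough equivalent of the old bulk call, assuming rawUpdate is available on the adapter with the (domain, query, operations) shape used elsewhere in this diff; the import path for DbAdapter is assumed:

import { type Doc, type DocumentUpdate, type Domain, type Ref } from '@hcengineering/core'
import { type DbAdapter } from '@hcengineering/server-core'

// One rawUpdate per document, addressed by _id, instead of the removed
// update(ctx, domain, Map<Ref<Doc>, Partial<Data<T>>>) bulk entry point.
async function bulkRawUpdate (
  adapter: DbAdapter,
  domain: Domain,
  operations: Map<Ref<Doc>, DocumentUpdate<Doc>>
): Promise<void> {
  for (const [_id, ops] of operations.entries()) {
    await adapter.rawUpdate(domain, { _id }, ops)
  }
}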
View File

@ -15,7 +15,6 @@
import core, {
type Class,
type Data,
type Doc,
type DocumentQuery,
type DocumentUpdate,
@ -106,12 +105,6 @@ export class DummyDbAdapter implements DbAdapter {
return Promise.resolve('')
}
async update<T extends Doc>(
ctx: MeasureContext,
domain: Domain,
operations: Map<Ref<Doc>, Partial<Data<T>>>
): Promise<void> {}
async groupBy<T, P extends Doc>(
ctx: MeasureContext,
domain: Domain,

View File

@ -781,7 +781,9 @@ export class FullTextIndexPipeline implements FullTextPipeline {
await pushToIndex()
await pushQueue.waitProcessing()
await ctx.with('update-index-state', {}, (ctx) => this.storage.update(ctx, DOMAIN_DOC_INDEX_STATE, docUpdates))
await ctx.with('update-index-state', {}, (ctx) =>
this.storage.rawUpdate(DOMAIN_DOC_INDEX_STATE, DOMAIN_DOC_INDEX_STATE, docUpdates)
)
}
private createContextData (): SessionDataImpl {

View File

@ -1163,61 +1163,6 @@ abstract class MongoAdapterBase implements DbAdapter {
})
}
update (ctx: MeasureContext, domain: Domain, operations: Map<Ref<Doc>, Partial<Doc>>): Promise<void> {
return ctx.with('update', { domain }, async () => {
const coll = this.collection(domain)
// remove old and insert new ones
const ops = Array.from(operations.entries())
let skip = 500
while (ops.length > 0) {
const part = ops.splice(0, skip)
try {
await ctx.with(
'bulk-update',
{},
() => {
return coll.bulkWrite(
part.map((it) => {
const { $unset, ...set } = it[1] as any
if ($unset !== undefined) {
for (const k of Object.keys(set)) {
if ($unset[k] === '') {
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
delete $unset[k]
}
}
}
return {
updateOne: {
filter: { _id: it[0] },
update: {
$set: { ...set, '%hash%': this.curHash() },
...($unset !== undefined ? { $unset } : {})
}
}
}
}),
{
ordered: false
}
)
},
{
updates: part.length
}
)
} catch (err: any) {
ctx.error('failed on bulk write', { error: err, skip })
if (skip !== 1) {
ops.push(...part)
skip = 1 // Let's update one by one, to loose only one failed variant.
}
}
}
})
}
clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> {
return ctx.with('clean', {}, async () => {
if (docs.length > 0) {

View File

@ -1,4 +1,19 @@
import core, {
Hierarchy,
MeasureMetricsContext,
ModelDb,
TxFactory,
type DocumentUpdate,
type PersonId,
type Ref,
type Space,
type Tx,
type WorkspaceUuid
} from '@hcengineering/core'
import { PostgresAdapter } from '../storage'
import { convertArrayParams, decodeArray } from '../utils'
import { genMinModel, test, type ComplexClass } from './minmodel'
import { createDummyClient, type TypedQuery } from './utils'
describe('array conversion', () => {
it('should handle undefined parameters', () => {
@ -55,3 +70,93 @@ describe('array decoding', () => {
expect(decodeArray('{"first \\"quote\\"","second \\"quote\\""}')).toEqual(['first "quote"', 'second "quote"'])
})
})
const factory = new TxFactory('email:test' as PersonId)
function upd (id: string, partial: DocumentUpdate<ComplexClass>): Tx {
return factory.createTxUpdateDoc<ComplexClass>(
test.class.ComplexClass,
core.space.Workspace,
id as Ref<ComplexClass>,
partial
)
}
describe('query to sql conversion tests', () => {
it('check dummy db client', async () => {
const queries: TypedQuery[] = []
const c = createDummyClient(queries)
await c.execute('select now()')
expect(queries[0].query).toEqual('select now()')
})
it('check simple update', async () => {
const { adapter, ctx, queries } = createTestContext()
await adapter.tx(
ctx,
upd('obj1', {
stringField: 'test'
})
)
expect(queries[0].query).toEqual(
'UPDATE pg_testing SET "modifiedBy" = update_data."_modifiedBy", "modifiedOn" = update_data."_modifiedOn", "%hash%" = update_data."_%hash%", data = COALESCE(data || update_data._data)\n FROM (values ($2::text, $3::text,$4::bigint,$5::text,$6::jsonb)) AS update_data(__id, "_modifiedBy","_modifiedOn","_%hash%","_data")\n WHERE "workspaceId" = $1::uuid AND "_id" = update_data.__id'
)
})
it('check space update', async () => {
const { adapter, ctx, queries } = createTestContext()
await adapter.tx(
ctx,
upd('obj1', {
space: 'new-space' as Ref<Space>
})
)
expect(queries[0].query).toEqual(
'UPDATE pg_testing SET "modifiedBy" = update_data."_modifiedBy", "modifiedOn" = update_data."_modifiedOn", "%hash%" = update_data."_%hash%", "space" = update_data."_space"\n FROM (values ($2::text, $3::text,$4::bigint,$5::text,$6::text)) AS update_data(__id, "_modifiedBy","_modifiedOn","_%hash%","_space")\n WHERE "workspaceId" = $1::uuid AND "_id" = update_data.__id'
)
})
it('check few documents update', async () => {
const { adapter, ctx, queries } = createTestContext()
await adapter.tx(
ctx,
upd('obj1', {
stringField: 'test'
}),
upd('obj2', {
stringField: 'test2'
}),
upd('obj3', {
stringField: 'test'
})
)
expect(queries[0].query).toEqual(
'UPDATE pg_testing SET "modifiedBy" = update_data."_modifiedBy", "modifiedOn" = update_data."_modifiedOn", "%hash%" = update_data."_%hash%", data = COALESCE(data || update_data._data)\n FROM (values ($2::text, $3::text,$4::bigint,$5::text,$6::jsonb),($7::text, $8::text,$9::bigint,$10::text,$11::jsonb),($12::text, $13::text,$14::bigint,$15::text,$16::jsonb)) AS update_data(__id, "_modifiedBy","_modifiedOn","_%hash%","_data")\n WHERE "workspaceId" = $1::uuid AND "_id" = update_data.__id'
)
})
})
function createTestContext (): { adapter: PostgresAdapter, ctx: MeasureMetricsContext, queries: TypedQuery[] } {
const ctx = new MeasureMetricsContext('test', {})
const queries: TypedQuery[] = []
const c = createDummyClient(queries)
const minModel = genMinModel()
const hierarchy = new Hierarchy()
for (const tx of minModel) {
hierarchy.tx(tx)
}
const modelDb = new ModelDb(hierarchy)
modelDb.addTxes(ctx, minModel, true)
const adapter = new PostgresAdapter(
c,
{
url: () => 'test',
close: () => {}
},
'workspace' as WorkspaceUuid,
hierarchy,
modelDb,
'test'
)
return { adapter, ctx, queries }
}

View File

@ -14,19 +14,20 @@
//
import core, {
type PersonId,
type Arr,
type AttachedDoc,
type Class,
ClassifierKind,
type Data,
type Doc,
type Domain,
DOMAIN_DOC_INDEX_STATE,
DOMAIN_MODEL,
DOMAIN_RELATION,
DOMAIN_TX,
type Mixin,
type Obj,
type PersonId,
type Ref,
type TxCreateDoc,
type TxCUD,
@ -72,15 +73,33 @@ export interface AttachedComment extends AttachedDoc {
message: string
}
export interface ComplexClass extends Doc {
stringField: string
numberField: number
booleanField: boolean
arrayField: string[]
numberArrayField: number[]
}
export interface ComplexMixin extends Mixin<ComplexClass> {
stringField: string
numberField: number
booleanField: boolean
arrayField: string[]
numberArrayField: number[]
}
/**
* @public
*/
export const test = plugin('test' as Plugin, {
mixin: {
TestMixin: '' as Ref<Mixin<TestMixin>>
TestMixin: '' as Ref<Mixin<TestMixin>>,
ComplexMixin: '' as Ref<Mixin<ComplexMixin>>
},
class: {
TestComment: '' as Ref<Class<AttachedComment>>
TestComment: '' as Ref<Class<AttachedComment>>,
ComplexClass: '' as Ref<Class<ComplexClass>>
}
})
@ -197,6 +216,23 @@ export function genMinModel (): TxCUD<Doc>[] {
kind: ClassifierKind.CLASS
})
)
txes.push(
createClass(test.class.ComplexClass, {
label: 'ComplexClass' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: 'pg-testing' as Domain
})
)
txes.push(
createClass(test.mixin.ComplexMixin, {
label: 'ComplexMixin' as IntlString,
extends: test.class.ComplexClass,
kind: ClassifierKind.MIXIN,
domain: 'pg-testing' as Domain
})
)
const u1 = 'User1' as PersonId
const u2 = 'User2' as PersonId

View File

@ -0,0 +1,18 @@
import type { DBClient } from '../client'
export interface TypedQuery {
query: string
params?: any[]
}
export function createDummyClient (queries: TypedQuery[]): DBClient {
const client: DBClient = {
execute: async (query, params) => {
queries.push({ query, params })
return Object.assign([], { count: 0 })
},
raw: () => jest.fn() as any,
reserve: async () => client,
release: jest.fn()
}
return client
}

View File

@ -83,8 +83,7 @@ class GreenClient implements DBClient {
release (): void {}
async reserve (): Promise<DBClient> {
// We do reserve of connection, if we need it.
return createGreenDBClient(this.url, this.token, await this.connection.reserve(), this.decoder)
return createDBClient(await this.connection.reserve())
}
raw (): postgres.Sql {

View File

@ -1684,18 +1684,6 @@ abstract class PostgresAdapterBase implements DbAdapter {
})
}
async update (ctx: MeasureContext, domain: Domain, operations: Map<Ref<Doc>, Partial<Doc>>): Promise<void> {
const ids = [...operations.entries()]
const groups = groupByArray(ids, (it) => JSON.stringify(it[1]))
for (const [, values] of groups.entries()) {
const ids = values.map((it) => it[0])
while (ids.length > 0) {
const part = ids.splice(0, 200)
await this.rawUpdate(domain, { _id: { $in: part } }, values[0][1])
}
}
}
@withContext('insert')
async insert (ctx: MeasureContext, domain: string, docs: Doc[]): Promise<TxResult> {
await this.upload(ctx, domain as Domain, docs, false)
@ -1714,7 +1702,7 @@ interface OperationBulk {
const initRateLimit = new RateLimiter(1)
class PostgresAdapter extends PostgresAdapterBase {
export class PostgresAdapter extends PostgresAdapterBase {
async init (
ctx: MeasureContext,
contextVars: Record<string, any>,
@ -1819,7 +1807,7 @@ class PostgresAdapter extends PostgresAdapterBase {
result.push(res)
}
}
// TODO: Optimize updates
if (ops.updates.length > 0) {
const res = await this.txUpdateDoc(ctx, domain, ops.updates, domainFields)
for (const r of res) {

View File

@ -42,7 +42,7 @@ export function doSessionOp (
): void {
if (data.session instanceof Promise) {
// We need to copy since we will out of protected buffer area
const msgCopy = Buffer.copyBytesFrom(msg)
const msgCopy = Buffer.copyBytesFrom(new Uint8Array(msg))
void data.session
.then((_session) => {
data.session = _session

View File

@ -111,7 +111,7 @@ export async function initModel (
}
try {
logger.log('creating database...', workspaceId)
logger.log('creating database...', { workspaceId })
const firstTx: Tx = {
_class: core.class.Tx,
_id: 'first-tx' as Ref<Tx>,

View File

@ -12,7 +12,6 @@ services:
image: cockroachdb/cockroach:latest-v24.2
ports:
- '26258:26257'
- '8089:8080'
command: start-single-node --insecure
restart: unless-stopped
minio:

View File

@ -219,13 +219,14 @@ export class Transactor extends DurableObject<Env> {
const st = Date.now()
const r = this.sessionManager.handleRequest(this.measureCtx, s.session, cs, request, this.workspace)
void r.finally(() => {
const time = Date.now() - st
console.log({
message: 'handle-request',
message: 'handle-request: ' + time,
method: request.method,
params: request.params,
workspace: s.workspaceId,
user: s.session.getUser(),
time: Date.now() - st
time
})
})
this.ctx.waitUntil(r)