Merge branch 'develop' into staging-new

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
Andrey Sobolev 2025-05-06 11:52:38 +07:00
commit 527c70e473
No known key found for this signature in database
GPG Key ID: BD80F68D68D8F7F2
27 changed files with 829 additions and 139 deletions

4
.vscode/launch.json vendored
View File

@ -459,14 +459,14 @@
"MINIO_ENDPOINT": "localhost:9000",
"TRANSACTOR_URL": "ws://localhost:3332",
"ACCOUNTS_URL": "http://localhost:3000",
"ACCOUNT_DB_URL": "mongodb://localhost:27017",
"ACCOUNT_DB_URL": "postgresql://root@huly.local:26257/defaultdb?sslmode=disable",
// "ACCOUNT_DB_URL": "postgresql://postgres:example@localhost:5433",
// "DB_URL": "postgresql://postgres:example@localhost:5433",
"DB_URL": "postgresql://root@huly.local:26257/defaultdb?sslmode=disable",
"MONGO_URL": "mongodb://localhost:27017",
"TELEGRAM_DATABASE": "telegram-service",
"REKONI_URL": "http://localhost:4004",
"MODEL_VERSION": "0.7.1"
"MODEL_VERSION": "0.7.75"
},
"runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
"sourceMaps": true,

View File

@ -1,4 +1,4 @@
STORAGE_CONFIG="minio|minio?accessKey=minioadmin&secretKey=minioadmin"
STORAGE_CONFIG="datalake|http://huly.local:4030"
MONGO_URL=mongodb://mongodb:27017?compressors=snappy
DB_URL_PG=postgresql://postgres:example@postgres:5432
BACKUP_STORAGE_CONFIG="minio|minio?accessKey=minioadmin&secretKey=minioadmin"

View File

@ -109,6 +109,10 @@ services:
- 9001:9001
volumes:
- files:/data
healthcheck:
test: ["CMD", "mc", "ready", "local"]
interval: 5s
retries: 10
restart: unless-stopped
elastic:
image: 'elasticsearch:7.14.2'
@ -128,7 +132,7 @@ services:
healthcheck:
interval: 20s
retries: 10
test: curl -s http://localhost:9200/_cluster/health | grep -vq '"status":"red"'
test: curl -f http://localhost:9000/minio/health/live
restart: unless-stopped
account:
image: hardcoreeng/account
@ -261,7 +265,9 @@ services:
- SERVER_SECRET=secret
- ACCOUNTS_URL=http://huly.local:3000
- STATS_URL=http://huly.local:4900
- UPLOAD_URL=/files
- FILES_URL=http://huly.local:4030/blob/:workspace/:blobId/:filename
- UPLOAD_URL=http://huly.local:4030/upload/form-data/:workspace
- PREVIEW_CONFIG=http://huly.local:4030/image/fit=cover,width=:width,height=:height,dpr=:dpr/:workspace/:blobId
- GMAIL_URL=http://huly.local:8088
- CALENDAR_URL=http://huly.local:8095
- TELEGRAM_URL=http://huly.local:8086
@ -505,6 +511,30 @@ services:
- STATS_URL=http://huly.local:4900
- STORAGE_CONFIG=${STORAGE_CONFIG}
- ACCOUNTS_URL=http://huly.local:3000
datalake:
image: hardcoreeng/datalake
extra_hosts:
- 'huly.local:host-gateway'
depends_on:
minio:
condition: service_healthy
cockroach:
condition: service_started
stats:
condition: service_started
account:
condition: service_started
ports:
- 4030:4030
environment:
- PORT=4030
- SECRET=secret
- ACCOUNTS_URL=http://huly.local:3000
- STATS_URL=http://huly.local:4900
- STREAM_URL=http://huly.local:1080/recording
- DB_URL=${DB_CR_URL}
- BUCKETS=blobs,eu|http://minio:9000?accessKey=minioadmin&secretKey=minioadmin
restart: unless-stopped
volumes:
db:
dbpg:

View File

@ -1,7 +1,9 @@
{
"ACCOUNTS_URL": "/account",
"COLLABORATOR_URL": "ws://huly.local:3078",
"UPLOAD_URL": "/files",
"FILES_URL": "http://huly.local:4030/blob/:workspace/:blobId/:filename",
"UPLOAD_URL": "http://huly.local:4030/upload/form-data/:workspace",
"PREVIEW_CONFIG": "http://huly.local:4030/image/fit=cover,width=:width,height=:height,dpr=:dpr/:workspace/:blobId",
"TELEGRAM_URL": "http://huly.local:8086",
"GMAIL_URL": "http://huly.huly.local:8088",
"CALENDAR_URL": "http://localhost:8095",

View File

@ -479,7 +479,17 @@ export function defineViewlets (builder: Builder): void {
configOptions: {
strict: true
},
config: ['subIssues', 'priority', 'component', 'dueDate', 'labels', 'estimation', 'attachments', 'comments']
config: [
'subIssues',
'priority',
'component',
'milestone',
'dueDate',
'labels',
'estimation',
'attachments',
'comments'
]
},
tracker.viewlet.IssueKanban
)

View File

@ -23,8 +23,8 @@ import {
} from '@hcengineering/model'
import { DOMAIN_PREFERENCE } from '@hcengineering/preference'
import view, { type Filter, type FilteredView, type ViewletPreference, viewId } from '@hcengineering/view'
import { getSocialKeyByOldAccount, getUniqueAccounts } from '@hcengineering/model-core'
import { type AccountUuid, MeasureMetricsContext } from '@hcengineering/core'
import { getSocialIdFromOldAccount, getSocialKeyByOldAccount, getUniqueAccounts } from '@hcengineering/model-core'
import core, { type AccountUuid, MeasureMetricsContext, type PersonId } from '@hcengineering/core'
import { DOMAIN_VIEW } from '.'
@ -171,6 +171,81 @@ async function migrateSocialIdsToGlobalAccounts (client: MigrationClient): Promi
ctx.info('finished processing view filtered view users ', {})
}
async function migrateAccsInSavedFilters (client: MigrationClient): Promise<void> {
const ctx = new MeasureMetricsContext('view migrateAccsInSavedFilters', {})
const hierarchy = client.hierarchy
const socialKeyByAccount = await getSocialKeyByOldAccount(client)
const socialIdBySocialKey = new Map<string, PersonId | null>()
const socialIdByOldAccount = new Map<string, PersonId | null>()
ctx.info('processing view filtered view accounts in filters ', {})
const affectedViews = await client.find<FilteredView>(DOMAIN_VIEW, {
_class: view.class.FilteredView,
filters: { $regex: '%core:class:Account%' }
})
for (const view of affectedViews) {
const filters = JSON.parse(view.filters)
const newFilters = []
let needUpdate = false
for (const filter of filters) {
const key = filter?.key
if (key == null) {
newFilters.push(filter)
continue
}
const type = key.attribute?.type
const objClass = key._class
const objKey = key.key
if (type == null || objClass == null || objKey == null) {
newFilters.push(filter)
continue
}
if (type._class !== 'core:class:RefTo' || type.to !== 'core:class:Account') {
newFilters.push(filter)
continue
}
const newAttrType = hierarchy.getAttribute(objClass, objKey)
if (newAttrType.type._class !== core.class.TypePersonId) {
newFilters.push(filter)
continue
}
const newFilter = { ...filter }
newFilter.key.attribute.type = {
_class: newAttrType.type._class,
label: newAttrType.type.label
}
const oldValue = newFilter.value
newFilter.value = []
for (const accId of oldValue) {
const socialId = await getSocialIdFromOldAccount(
client,
accId,
socialKeyByAccount,
socialIdBySocialKey,
socialIdByOldAccount
)
newFilter.value.push(socialId ?? accId)
}
newFilters.push(newFilter)
needUpdate = true
}
if (needUpdate) {
await client.update(DOMAIN_VIEW, { _id: view._id }, { filters: JSON.stringify(newFilters) })
}
}
ctx.info('finished processing view filtered view accounts in filters ', {})
}
export const viewOperation: MigrateOperation = {
async migrate (client: MigrationClient, mode): Promise<void> {
await tryMigrate(mode, client, viewId, [
@ -193,6 +268,11 @@ export const viewOperation: MigrateOperation = {
state: 'social-ids-to-global-accounts',
mode: 'upgrade',
func: migrateSocialIdsToGlobalAccounts
},
{
state: 'accs-in-saved-filters',
mode: 'upgrade',
func: migrateAccsInSavedFilters
}
])
},

View File

@ -10,28 +10,71 @@ In order to be able to install required packages, you will need to obtain GitHub
npm install @hcengineering/api-client
```
## WebSocket Client vs REST Client
The api client package provides two main client variants: a WebSocket client and a REST client. The WebSocket client holds persistent connection to the Huly Platform API. The REST client uses standard HTTP requests to perform operations.
### WebSocket Client
```ts
import { connect } from '@hcengineering/api-client'
// Connect to Huly
const client = await connect('https://huly.app', {
email: 'johndoe@example.com',
password: 'password',
workspace: 'my-workspace',
})
// Use the client to perform operations
...
// Close the client when done
await client.close()
```
### REST Client
```ts
import { connectRest } from '@hcengineering/api-client'
// Connect to Huly
const client = await connectRest('https://huly.app', {
email: 'johndoe@example.com',
password: 'password',
workspace: 'my-workspace'
})
// Use the client to perform operations
...
```
## Authentication
There are two ways to connect to the platform, using email and password, or using token.
The client supports two authentication methods: using email and password, or using a token.
When authenticated, the client will have access to the same resources as the user.
> Note: The examples below use the WebSocket client (`connect`). To use the REST client instead, import and call `connectRest` with the same options.
Parameters:
- `url`: URL of the Huly instance
- `url`: URL of the Huly instance, for Huly Cloud use `https://huly.app`
- `options`: Connection options
- `workspace`: Name of the workspace to connect to
- `workspace`: Name of the workspace to connect to, the workspace name can be found in the URL of the workspace: `https://huly.app/workbench/<workspace-name>`
- `token`: Optional authentication token
- `email`: Optional user email
- `password`: Optional user password
- `connectionTimeout`: Optional connection timeout
- `socketFactory`: Optional socket factory
### Using Email and Password
```ts
const client = await connect('http://localhost:8087', {
email: 'user1',
password: '1234',
workspace: 'ws1'
import { connect } from '@hcengineering/api-client'
const client = await connect('https://huly.app', {
email: 'johndoe@example.com',
password: 'password',
workspace: 'my-workspace'
})
...
@ -42,9 +85,11 @@ await client.close()
### Using Token
```ts
const client = await connect('http://localhost:8087', {
import { connect } from '@hcengineering/api-client'
const client = await connect('https://huly.app', {
token: '...',
workspace: 'ws1'
workspace: 'my-workspace'
})
...
@ -52,7 +97,9 @@ const client = await connect('http://localhost:8087', {
await client.close()
```
## Example usage
## Client API
The client provides a set of methods for interacting with the Huly Platform API. This section describes the main methods available in the client.
### Fetch API
@ -64,7 +111,7 @@ Retrieves a single document matching the query criteria.
Parameters:
- `_class`: Class of the object to find, results will include all subclasses of the targe class
- `_class`: Class of the object to find, results will include all subclasses of the target class
- `query`: Query criteria
- `options`: Find options
- `limit`: Limit the number of results returned
@ -94,7 +141,7 @@ Retrieves multiple document matching the query criteria.
Parameters:
- `_class`: Class of the object to find, results will include all subclasses of the targe class
- `_class`: Class of the object to find, results will include all subclasses of the target class
- `query`: Query criteria
- `options`: Find options
- `limit`: Limit the number of results returned
@ -160,7 +207,7 @@ const personId = await client.createDoc(
#### updateDoc
Updates exising document.
Updates existing document.
Parameters:
@ -188,7 +235,7 @@ await client.updateDoc(
#### removeDoc
Removes exising document.
Removes existing document.
Parameters:
@ -258,7 +305,7 @@ await client.addCollection(
#### updateCollection
Updates exising attached document in collection.
Updates existing attached document in collection.
Parameters:
@ -292,7 +339,7 @@ await client.updateCollection(
#### removeCollection
Removes exising attached document from collection.
Removes existing attached document from collection.
Parameters:

View File

@ -36,11 +36,15 @@
docObject[type] = users
}
$: reviewers = docObject?.reviewers ?? []
$: approvers = docObject?.approvers ?? []
$: coAuthors = docObject?.coAuthors ?? []
</script>
{#if docObject !== undefined}
<div class="root">
<DocTeam controlledDoc={docObject} {space} on:update={handleUpdate} />
<DocTeam controlledDoc={docObject} {space} on:update={handleUpdate} {approvers} {reviewers} {coAuthors} />
</div>
{/if}

View File

@ -75,12 +75,10 @@
const client = getClient()
function group (
txMap: Map<Ref<Person>, Tx[]>,
persons: Ref<Person>[],
txes: Tx[],
from: Timestamp,
to: Timestamp
): { add: Map<Asset, { count: number, tx: TxCUD<Doc>[] }>, change: Map<Asset, { count: number, tx: TxCUD<Doc>[] }> } {
const txes = persons.flatMap((it) => txMap.get(it))
const add = new Map<Asset, { count: number, tx: TxCUD<Doc>[] }>()
const change = new Map<Asset, { count: number, tx: TxCUD<Doc>[] }>()
const h = client.getHierarchy()
@ -153,8 +151,7 @@
{@const planned = gitem?.mappings.reduce((it, val) => it + val.total, 0) ?? 0}
{@const pevents = gitem?.events.reduce((it, val) => it + (val.dueDate - val.date), 0) ?? 0}
{@const busy = gitem?.busy.slots.reduce((it, val) => it + (val.dueDate - val.date), 0) ?? 0}
<!-- {@const accounts = personAccounts.filter((it) => it.person === person).map((it) => it._id)} -->
{@const txInfo = group(txes, persons, dayFrom, dayTo)}
{@const txInfo = group(txes.get(person) ?? [], dayFrom, dayTo)}
<div style:overflow="auto" style:height="{height}rem" class="p-1">
<div class="flex-row-center p-1">
<Icon icon={time.icon.Team} size={'small'} />

View File

@ -80,6 +80,7 @@
import PriorityEditor from './PriorityEditor.svelte'
import StatusEditor from './StatusEditor.svelte'
import EstimationEditor from './timereport/EstimationEditor.svelte'
import MilestoneEditor from '../milestones/MilestoneEditor.svelte'
const _class = tracker.class.Issue
export let space: Ref<Project> | undefined = undefined
@ -437,6 +438,16 @@
justify={'center'}
/>
{/if}
{#if enabledConfig(config, 'milestone')}
<MilestoneEditor
value={issue}
{space}
isEditable={true}
kind={'link-bordered'}
size={'small'}
justify={'center'}
/>
{/if}
{#if enabledConfig(config, 'dueDate')}
<DueDatePresenter value={issue} size={'small'} kind={'link-bordered'} />
{/if}

View File

@ -474,10 +474,13 @@ describe('PostgresAccountDB', () => {
expect(mockClient.begin).toHaveBeenCalled()
expect(mockClient).toHaveBeenCalledWith(
'global_account' // First call with schema name
'global_account' // Verify schema name
)
expect(mockClient.mock.calls[3][0].map((s: string) => s.replace(/\s+/g, ' ')).join('')).toBe(
' INSERT INTO ._account_applied_migrations (identifier, ddl, last_processed_at) VALUES (, , NOW()) ON CONFLICT (identifier) DO NOTHING '
)
expect(mockClient).toHaveBeenCalledWith(
['INSERT INTO ', '._account_applied_migrations (identifier, ddl) VALUES (', ', ', ') ON CONFLICT DO NOTHING'],
expect.anything(),
expect.anything(),
'test_migration',
'CREATE TABLE test'

View File

@ -488,15 +488,102 @@ export class PostgresAccountDB implements AccountDB {
}
async migrate (name: string, ddl: string): Promise<void> {
await this.client.begin(async (client) => {
const res =
await client`INSERT INTO ${this.client(this.ns)}._account_applied_migrations (identifier, ddl) VALUES (${name}, ${ddl}) ON CONFLICT DO NOTHING`
const staleTimeoutMs = 30000
const retryIntervalMs = 5000
let migrationComplete = false
let updateInterval: NodeJS.Timeout | null = null
let executed = false
if (res.count === 1) {
console.log(`Applying migration: ${name}`)
await client.unsafe(ddl)
const executeMigration = async (client: Sql): Promise<void> => {
updateInterval = setInterval(() => {
this.client`
UPDATE ${this.client(this.ns)}._account_applied_migrations
SET last_processed_at = NOW()
WHERE identifier = ${name} AND applied_at IS NULL
`.catch((err) => {
console.error(`Failed to update last_processed_at for migration ${name}:`, err)
})
}, 5000)
await client.unsafe(ddl)
executed = true
}
try {
while (!migrationComplete) {
try {
executed = false
await this.client.begin(async (client) => {
// Only locks if row exists and is not already locked
const existing = await client`
SELECT identifier, applied_at, last_processed_at
FROM ${this.client(this.ns)}._account_applied_migrations
WHERE identifier = ${name}
FOR UPDATE NOWAIT
`
if (existing.length > 0) {
if (existing[0].applied_at !== null) {
// Already completed
migrationComplete = true
} else if (
existing[0].last_processed_at === null ||
Date.now() - new Date(existing[0].last_processed_at).getTime() > staleTimeoutMs
) {
// Take over the stale migration
await client`
UPDATE ${this.client(this.ns)}._account_applied_migrations
SET last_processed_at = NOW()
WHERE identifier = ${name}
`
await executeMigration(client)
}
} else {
const res = await client`
INSERT INTO ${this.client(this.ns)}._account_applied_migrations
(identifier, ddl, last_processed_at)
VALUES (${name}, ${ddl}, NOW())
ON CONFLICT (identifier) DO NOTHING
`
if (res.count === 1) {
// Successfully inserted
await executeMigration(client)
}
// If insert failed (count === 0), another worker got it first, we'll retry the loop
}
})
if (executed) {
await this.client`
UPDATE ${this.client(this.ns)}._account_applied_migrations
SET applied_at = NOW()
WHERE identifier = ${name}
`
migrationComplete = true
}
} catch (err: any) {
if (['55P03', '40001'].includes(err.code)) {
// newLockNotAvailableError, WriteTooOldError
} else {
console.error(`Error in migration ${name}: ${err.code} - ${err.message}`)
}
if (updateInterval !== null) {
clearInterval(updateInterval)
}
}
if (!migrationComplete) {
await new Promise((resolve) => setTimeout(resolve, retryIntervalMs))
}
}
})
} finally {
if (updateInterval !== null) {
clearInterval(updateInterval)
}
}
}
async _init (): Promise<void> {
@ -507,10 +594,39 @@ export class PostgresAccountDB implements AccountDB {
CREATE TABLE IF NOT EXISTS ${this.ns}._account_applied_migrations (
identifier VARCHAR(255) NOT NULL PRIMARY KEY
, ddl TEXT NOT NULL
, applied_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
, applied_at TIMESTAMP WITH TIME ZONE
, last_processed_at TIMESTAMP WITH TIME ZONE
);
ALTER TABLE ${this.ns}._account_applied_migrations
ADD COLUMN IF NOT EXISTS last_processed_at TIMESTAMP WITH TIME ZONE;
`
)
const constraintsExist = await this.client`
SELECT 1
FROM information_schema.columns
WHERE table_schema = ${this.ns}
AND table_name = '_account_applied_migrations'
AND column_name = 'applied_at'
AND (column_default IS NOT NULL OR is_nullable = 'NO')
`
if (constraintsExist.length > 0) {
try {
await this.client.unsafe(
`
ALTER TABLE ${this.ns}._account_applied_migrations
ALTER COLUMN applied_at DROP DEFAULT;
ALTER TABLE ${this.ns}._account_applied_migrations
ALTER COLUMN applied_at DROP NOT NULL;
`
)
} catch (err) {
// Ignore errors since they likely mean constraints were already removed by another concurrent migration
}
}
}
async createWorkspace (data: WorkspaceData, status: WorkspaceStatusData): Promise<WorkspaceUuid> {

View File

@ -88,6 +88,7 @@ import {
DBCollectionHelper,
type DBDoc,
doFetchTypes,
filterProjection,
getDBClient,
inferType,
isDataField,
@ -662,7 +663,9 @@ abstract class PostgresAdapterBase implements DbAdapter {
const joins = this.buildJoins<T>(_class, options)
// Add workspace name as $1
const select = `SELECT ${this.getProjection(vars, domain, options?.projection, joins, options?.associations)} FROM ${domain}`
const projection = this.localizeProjection(_class, options?.projection ?? undefined)
const select = `SELECT ${this.getProjection(vars, domain, projection, joins, options?.associations)} FROM ${domain}`
const showArchived = shouldShowArchived(query, options)
const secJoin = this.addSecurity(vars, query, showArchived, domain, ctx.contextData)
@ -714,11 +717,11 @@ abstract class PostgresAdapterBase implements DbAdapter {
options?.associations === undefined
) {
return toFindResult(
result.map((p) => parseDocWithProjection(p, domain, options?.projection)),
result.map((p) => parseDocWithProjection(p, domain, projection)),
total
)
} else {
const res = this.parseLookup<T>(result, joins, options?.projection, domain)
const res = this.parseLookup<T>(result, joins, projection, domain)
return toFindResult(res, total)
}
})) as FindResult<T>
@ -739,6 +742,36 @@ abstract class PostgresAdapterBase implements DbAdapter {
)
}
private localizeProjection<T extends Doc>(
_class: Ref<Class<T>>,
projection: Projection<T> | undefined
): Projection<T> | undefined {
if (projection === undefined) return
if (!this.hierarchy.isMixin(_class)) {
return projection
}
projection = { ...projection }
for (const key in projection) {
if (key.includes('.')) continue
try {
const attr = this.hierarchy.findAttribute(_class, key)
if (attr !== undefined && this.hierarchy.isMixin(attr.attributeOf)) {
const newKey = `${attr.attributeOf}.${attr.name}` as keyof Projection<T>
projection[newKey] = projection[key]
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
delete projection[key]
}
} catch (err: any) {
// ignore, if
}
}
return projection
}
private buildJoins<T extends Doc>(_class: Ref<Class<T>>, options: ServerFindOptions<T> | undefined): JoinProps[] {
const joins = this.buildJoin(_class, options?.lookup)
if (options?.domainLookup !== undefined) {
@ -776,7 +809,7 @@ abstract class PostgresAdapterBase implements DbAdapter {
}
if (query.space === acc.uuid) return // TODO: was it for private spaces? If so, need to fix it as they are not identified by acc.uuid now
if (domain === DOMAIN_SPACE && isOwner(acc) && showArchived) return
const key = domain === DOMAIN_SPACE ? '_id' : domain === DOMAIN_TX ? "data ->> 'objectSpace'" : 'space'
const key = domain === DOMAIN_SPACE ? '_id' : domain === DOMAIN_TX ? '"objectSpace"' : 'space'
const privateCheck = domain === DOMAIN_SPACE ? ' OR sec.private = false' : ''
const archivedCheck = showArchived ? '' : ' AND sec.archived = false'
const q = `(sec.members @> '{"${acc.uuid}"}' OR sec."_class" = '${core.class.SystemSpace}'${privateCheck})${archivedCheck}`
@ -888,17 +921,8 @@ abstract class PostgresAdapterBase implements DbAdapter {
continue
}
if (column === 'data') {
const data = row[column]
if (projection !== undefined) {
if (projection !== undefined) {
for (const key in data) {
if (!Object.prototype.hasOwnProperty.call(projection, key) || (projection as any)[key] === 0) {
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
delete data[key]
}
}
}
}
let data = row[column]
data = filterProjection(data, projection)
doc = { ...doc, ...data }
} else {
if (column === 'createdOn' || column === 'modifiedOn') {

View File

@ -554,6 +554,9 @@ export function convertArrayParams (parameters?: ParameterOrJSON<any>[]): any[]
}
export function filterProjection<T extends Doc> (data: any, projection: Projection<T> | undefined): any {
if (projection === undefined) {
return data
}
for (const key in data) {
if (!Object.prototype.hasOwnProperty.call(projection, key) || (projection as any)[key] === 0) {
// check nested projections in case of object

View File

@ -14,7 +14,7 @@
//
import { MeasureContext } from '@hcengineering/core'
import postgres, { type Row } from 'postgres'
import postgres, { Sql, type Row } from 'postgres'
import { type Location, type UUID } from './types'
import { type RetryOptions, retry } from './retry'
@ -75,7 +75,7 @@ export interface WorkspaceStatsResult {
size: number
}
export function createDb (ctx: MeasureContext, connectionString: string): BlobDB {
export async function createDb (ctx: MeasureContext, connectionString: string): Promise<BlobDB> {
const sql = postgres(connectionString, {
max: 5,
connection: {
@ -94,8 +94,8 @@ export function createDb (ctx: MeasureContext, connectionString: string): BlobDB
}
})
const dbAdapter = new PostgresDBAdapter(sql)
return new LoggedDB(ctx, new PostgresDB(new RetryDBAdapter(dbAdapter, { retries: 5 })))
const db = await PostgresDB.create(ctx, sql)
return new LoggedDB(ctx, new RetryDB(db, { retries: 5 }))
}
export interface BlobDB {
@ -114,37 +114,51 @@ export interface BlobDB {
getWorkspaceStats: (ctx: MeasureContext, workspace: string) => Promise<WorkspaceStatsResult>
}
interface DBAdapter {
execute: <T extends any[] = (Row & Iterable<Row>)[]>(query: string, params?: any[]) => Promise<T>
}
export class PostgresDB implements BlobDB {
private constructor (private readonly sql: Sql) {}
class RetryDBAdapter implements DBAdapter {
constructor (
private readonly db: DBAdapter,
private readonly options: RetryOptions
) {}
async execute<T extends any[] = (Row & Iterable<Row>)[]>(query: string, params?: any[]): Promise<T> {
return await retry(() => this.db.execute(query, params), this.options)
static async create (ctx: MeasureContext, sql: Sql): Promise<PostgresDB> {
const db = new PostgresDB(sql)
await db.initSchema(ctx)
return db
}
}
class PostgresDBAdapter implements DBAdapter {
constructor (private readonly sql: postgres.Sql) {}
async execute<T extends any[] = (Row & Iterable<Row>)[]>(query: string, params?: any[]): Promise<T> {
query = params !== undefined && params.length > 0 ? injectVars(query, params) : query
return await this.sql.unsafe<T>(query)
}
}
export class PostgresDB implements BlobDB {
constructor (private readonly db: DBAdapter) {}
async initSchema (ctx: MeasureContext): Promise<void> {
await this.execute('CREATE SCHEMA IF NOT EXISTS blob')
await this.execute(`CREATE TABLE IF NOT EXISTS blob.migrations
(
name VARCHAR(255) NOT NULL,
created_on TIMESTAMP NOT NULL DEFAULT now()
)`)
const appliedMigrations = (await this.execute<Row[]>('SELECT name FROM blob.migrations')).map((row) => row.name)
ctx.info('applied migrations', { migrations: appliedMigrations })
for (const [name, sql] of getMigrations()) {
if (appliedMigrations.includes(name)) {
continue
}
try {
ctx.warn('applying migration', { migration: name })
await this.execute(sql)
await this.execute('INSERT INTO blob.migrations (name) VALUES ($1)', [name])
} catch (err: any) {
ctx.error('failed to apply migration', { migration: name, error: err })
throw err
}
}
}
async getData (ctx: MeasureContext, dataId: BlobDataId): Promise<BlobDataRecord | null> {
const { hash, location } = dataId
const rows = await this.db.execute<BlobDataRecord[]>(
const rows = await this.execute<BlobDataRecord[]>(
`
SELECT hash, location, filename, size, type
FROM blob.data
@ -159,7 +173,7 @@ export class PostgresDB implements BlobDB {
async deleteBlobList (ctx: MeasureContext, blobList: BlobIds): Promise<void> {
const { workspace, names } = blobList
await this.db.execute(
await this.execute(
`
UPDATE blob.blob
SET deleted_at = now()
@ -172,7 +186,7 @@ export class PostgresDB implements BlobDB {
async getBlob (ctx: MeasureContext, blobId: BlobId): Promise<BlobWithDataRecord | null> {
const { workspace, name } = blobId
const rows = await this.db.execute<BlobWithDataRecord[]>(
const rows = await this.execute<BlobWithDataRecord[]>(
`
SELECT b.workspace, b.name, b.hash, b.location, b.parent, d.filename, d.size, d.type
FROM blob.blob AS b
@ -193,7 +207,7 @@ export class PostgresDB implements BlobDB {
cursor = cursor ?? ''
limit = Math.min(limit ?? 100, 1000)
const rows = await this.db.execute<BlobWithDataRecord[]>(
const rows = await this.execute<BlobWithDataRecord[]>(
`
SELECT b.workspace, b.name, b.hash, b.location, b.parent, b.deleted_at, d.filename, d.size, d.type
FROM blob.blob AS b
@ -214,7 +228,7 @@ export class PostgresDB implements BlobDB {
async createBlob (ctx: MeasureContext, blob: Omit<BlobRecord, 'filename'>): Promise<void> {
const { workspace, name, hash, location, parent } = blob
await this.db.execute(
await this.execute(
`
UPSERT INTO blob.blob (workspace, name, hash, location, parent, deleted_at)
VALUES ($1, $2, $3, $4, $5, NULL)
@ -226,7 +240,7 @@ export class PostgresDB implements BlobDB {
async createData (ctx: MeasureContext, data: BlobDataRecord): Promise<void> {
const { hash, location, filename, size, type } = data
await this.db.execute(
await this.execute(
`
UPSERT INTO blob.data (hash, location, filename, size, type)
VALUES ($1, $2, $3, $4, $5)
@ -238,7 +252,7 @@ export class PostgresDB implements BlobDB {
async createBlobData (ctx: MeasureContext, data: BlobWithDataRecord): Promise<void> {
const { workspace, name, hash, location, parent, filename, size, type } = data
await this.db.execute(
await this.execute(
`
UPSERT INTO blob.data (hash, location, filename, size, type)
VALUES ($1, $2, $3, $4, $5)
@ -246,7 +260,7 @@ export class PostgresDB implements BlobDB {
[hash, location, filename, size, type]
)
await this.db.execute(
await this.execute(
`
UPSERT INTO blob.blob (workspace, name, hash, location, parent, deleted_at)
VALUES ($1, $2, $3, $4, $5, NULL)
@ -265,7 +279,7 @@ export class PostgresDB implements BlobDB {
queue.push(name)
while (queue.length > 0) {
const children = await this.db.execute<BlobId[]>(
const children = await this.execute<BlobId[]>(
`
SELECT blob.workspace, blob.name
FROM blob.blob
@ -284,7 +298,7 @@ export class PostgresDB implements BlobDB {
}
}
await this.db.execute(
await this.execute(
`
UPDATE blob.blob
SET deleted_at = now()
@ -297,7 +311,7 @@ export class PostgresDB implements BlobDB {
async getMeta (ctx: MeasureContext, blobId: BlobId): Promise<BlobMeta | null> {
const { workspace, name } = blobId
const rows = await this.db.execute<BlobMetaRecord[]>(
const rows = await this.execute<BlobMetaRecord[]>(
`
SELECT m.workspace, m.name, m.meta
FROM blob.blob b
@ -313,7 +327,7 @@ export class PostgresDB implements BlobDB {
async setMeta (ctx: MeasureContext, blobId: BlobId, meta: BlobMeta): Promise<void> {
const { workspace, name } = blobId
await this.db.execute(
await this.execute(
`
UPSERT INTO blob.meta (workspace, name, meta)
VALUES ($1, $2, $3)
@ -325,7 +339,7 @@ export class PostgresDB implements BlobDB {
async setParent (ctx: MeasureContext, blob: BlobId, parent: BlobId | null): Promise<void> {
const { workspace, name } = blob
await this.db.execute(
await this.execute(
`
UPDATE blob.blob
SET parent = $3
@ -336,14 +350,14 @@ export class PostgresDB implements BlobDB {
}
async getStats (ctx: MeasureContext): Promise<StatsResult> {
const blobStatsRows = await this.db.execute(`
const blobStatsRows = await this.execute(`
SELECT count(distinct b.workspace) as workspaces, count(1) as count, sum(d.size) as size
FROM blob.blob b
JOIN blob.data AS d ON b.hash = d.hash AND b.location = d.location
WHERE deleted_at IS NULL
`)
const dataStatsRows = await this.db.execute(`
const dataStatsRows = await this.execute(`
SELECT count(1) as count, sum(d.size) as size
FROM blob.data AS d
`)
@ -365,7 +379,7 @@ export class PostgresDB implements BlobDB {
}
async getWorkspaceStats (ctx: MeasureContext, workspace: string): Promise<WorkspaceStatsResult> {
const rows = await this.db.execute(
const rows = await this.execute(
`
SELECT count(1) as count, sum(d.size) as size
FROM blob.blob b
@ -384,6 +398,65 @@ export class PostgresDB implements BlobDB {
}
}
export class RetryDB implements BlobDB {
constructor (
private readonly db: BlobDB,
private readonly options: RetryOptions
) {}
async getData (ctx: MeasureContext, dataId: BlobDataId): Promise<BlobDataRecord | null> {
return await retry(() => this.db.getData(ctx, dataId), this.options)
}
async getBlob (ctx: MeasureContext, blobId: BlobId): Promise<BlobWithDataRecord | null> {
return await retry(() => this.db.getBlob(ctx, blobId), this.options)
}
async listBlobs (ctx: MeasureContext, workspace: string, cursor?: string, limit?: number): Promise<ListBlobResult> {
return await retry(() => this.db.listBlobs(ctx, workspace, cursor, limit), this.options)
}
async createData (ctx: MeasureContext, data: BlobDataRecord): Promise<void> {
await retry(() => this.db.createData(ctx, data), this.options)
}
async createBlob (ctx: MeasureContext, blob: Omit<BlobRecord, 'filename'>): Promise<void> {
await retry(() => this.db.createBlob(ctx, blob), this.options)
}
async createBlobData (ctx: MeasureContext, data: BlobWithDataRecord): Promise<void> {
await retry(() => this.db.createBlobData(ctx, data), this.options)
}
async deleteBlobList (ctx: MeasureContext, blobs: BlobIds): Promise<void> {
await retry(() => this.db.deleteBlobList(ctx, blobs), this.options)
}
async deleteBlob (ctx: MeasureContext, blob: BlobId): Promise<void> {
await retry(() => this.db.deleteBlob(ctx, blob), this.options)
}
async getMeta (ctx: MeasureContext, blobId: BlobId): Promise<BlobMeta | null> {
return await retry(() => this.db.getMeta(ctx, blobId), this.options)
}
async setMeta (ctx: MeasureContext, blobId: BlobId, meta: BlobMeta): Promise<void> {
await retry(() => this.db.setMeta(ctx, blobId, meta), this.options)
}
async setParent (ctx: MeasureContext, blob: BlobId, parent: BlobId | null): Promise<void> {
await retry(() => this.db.setParent(ctx, blob, parent), this.options)
}
async getStats (ctx: MeasureContext): Promise<StatsResult> {
return await retry(() => this.db.getStats(ctx), this.options)
}
async getWorkspaceStats (ctx: MeasureContext, workspace: string): Promise<WorkspaceStatsResult> {
return await retry(() => this.db.getWorkspaceStats(ctx, workspace), this.options)
}
}
export class LoggedDB implements BlobDB {
constructor (
private readonly ctx: MeasureContext,
@ -478,3 +551,44 @@ export function escape (value: any): string {
throw new Error(`Unsupported value type: ${typeof value}`)
}
}
function getMigrations (): [string, string][] {
return [migrationV1()]
}
function migrationV1 (): [string, string] {
const sql = `
CREATE TYPE IF NOT EXISTS blob.location AS ENUM ('eu', 'weur', 'eeur', 'wnam', 'enam', 'apac');
CREATE TABLE IF NOT EXISTS blob.data (
hash UUID NOT NULL,
location blob.location NOT NULL,
size INT8 NOT NULL,
filename STRING(255) NOT NULL,
type STRING(255) NOT NULL,
CONSTRAINT pk_data PRIMARY KEY (hash, location)
);
CREATE TABLE IF NOT EXISTS blob.blob (
workspace STRING(255) NOT NULL,
name STRING(255) NOT NULL,
hash UUID NOT NULL,
location blob.location NOT NULL,
parent STRING(255) DEFAULT NULL,
created_at TIMESTAMP NOT NULL DEFAULT NOW(),
deleted_at TIMESTAMP DEFAULT NULL,
CONSTRAINT pk_blob PRIMARY KEY (workspace, name),
CONSTRAINT fk_data FOREIGN KEY (hash, location) REFERENCES blob.data (hash, location),
CONSTRAINT fk_parent FOREIGN KEY (workspace, parent) REFERENCES blob.blob (workspace, name) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS blob.meta (
workspace STRING(255) NOT NULL,
name STRING(255) NOT NULL,
meta JSONB NOT NULL,
CONSTRAINT pk_meta PRIMARY KEY (workspace, name),
CONSTRAINT fk_blob FOREIGN KEY (workspace, name) REFERENCES blob.blob (workspace, name)
);
`
return ['init_tables_01', sql]
}

View File

@ -48,7 +48,7 @@ export const main = async (): Promise<void> => {
)
})
const { app, close } = createServer(metricsContext, config)
const { app, close } = await createServer(metricsContext, config)
const server = listen(app, config.Port)
const shutdown = (): void => {

View File

@ -30,8 +30,10 @@ import {
type S3PutOptions
} from './types'
export function createBucket (client: S3, bucket: string): S3Bucket {
return new S3BucketImpl(client, bucket)
export async function createBucket (ctx: MeasureContext, client: S3, bucket: string): Promise<S3Bucket> {
const impl = new S3BucketImpl(client, bucket)
await impl.init(ctx)
return impl
}
class S3BucketImpl implements S3Bucket {
@ -40,6 +42,25 @@ class S3BucketImpl implements S3Bucket {
readonly bucket: string
) {}
async init (ctx: MeasureContext): Promise<void> {
try {
await this.client.headBucket({ Bucket: this.bucket })
} catch (err: any) {
if (err.name === 'NotFound') {
ctx.warn('bucket not found, creating', { bucket: this.bucket })
try {
await this.client.createBucket({ Bucket: this.bucket })
ctx.info('bucket created', { bucket: this.bucket })
} catch (err: any) {
ctx.error('failed to create bucket', { bucket: this.bucket, error: err.message })
throw err
}
} else {
throw err
}
}
}
async head (ctx: MeasureContext, key: string): Promise<S3Object | null> {
try {
const result = await ctx.with('s3.headObject', {}, () =>

View File

@ -96,7 +96,7 @@ const handleRequest = async (
}
}
export function createServer (ctx: MeasureContext, config: Config): { app: Express, close: () => void } {
export async function createServer (ctx: MeasureContext, config: Config): Promise<{ app: Express, close: () => void }> {
const buckets: Array<{ location: Location, bucket: S3Bucket }> = []
for (const bucket of config.Buckets) {
const location = bucket.location as Location
@ -108,13 +108,13 @@ export function createServer (ctx: MeasureContext, config: Config): { app: Expre
location === 'enam' ||
location === 'apac'
) {
buckets.push({ location, bucket: createBucket(createClient(bucket), bucket.bucket) })
buckets.push({ location, bucket: await createBucket(ctx, createClient(bucket), bucket.bucket) })
} else {
ctx.warn('invalid bucket location', { location, bucket })
}
}
const db = createDb(ctx, config.DbUrl)
const db = await createDb(ctx, config.DbUrl)
const datalake = new DatalakeImpl(db, buckets, { cacheControl })
const tempDir = new TemporaryDir(ctx, 'datalake-', config.CleanupInterval)

View File

@ -0,0 +1,165 @@
//
// Copyright © 2025 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import { MeasureContext, WorkspaceUuid } from '@hcengineering/core'
import { StorageAdapter } from '@hcengineering/server-core'
import * as serverClient from '@hcengineering/server-client'
import { GmailController } from '../gmailController'
import { GmailClient } from '../gmail'
import { WorkspaceClient } from '../workspaceClient'
import { Token } from '../types'
import * as integrations from '../integrations'
import * as tokens from '../tokens'
jest.mock('../workspaceClient')
jest.mock('../gmail')
jest.mock('../integrations')
jest.mock('../tokens')
jest.mock('@hcengineering/server-client')
jest.mock('../utils')
jest.mock('../config')
/* eslint-disable @typescript-eslint/unbound-method */
describe('GmailController', () => {
let gmailController: GmailController
let mockCtx: MeasureContext
let mockStorageAdapter: StorageAdapter
let mockWorkspaceClient: jest.Mocked<WorkspaceClient>
let mockGmailClients: Map<string, jest.Mocked<GmailClient>>
const workspaceAId: WorkspaceUuid = 'workspace-a' as any
const workspaceBId: WorkspaceUuid = 'workspace-b' as any
const workspaceATokens: Token[] = [
{ userId: 'user1', workspace: workspaceAId, token: 'token1' } as any,
{ userId: 'user2', workspace: workspaceAId, token: 'token2' } as any
]
const workspaceBTokens: Token[] = [
{ userId: 'user3', workspace: workspaceBId, token: 'token3' } as any,
{ userId: 'user4', workspace: workspaceBId, token: 'token4' } as any,
{ userId: 'user5', workspace: workspaceBId, token: 'token5' } as any
]
beforeEach(() => {
jest.clearAllMocks()
mockCtx = {
info: (message: string, details: any) => {
console.log(message, details)
},
error: (message: string, details: any) => {
console.error(message, details)
},
warn: (message: string, details: any) => {
console.warn(message, details)
}
} as unknown as MeasureContext
mockStorageAdapter = {} as unknown as StorageAdapter
mockWorkspaceClient = {
createGmailClient: jest.fn(),
checkUsers: jest.fn().mockResolvedValue(undefined),
getNewMessages: jest.fn().mockResolvedValue(undefined),
close: jest.fn()
} as unknown as jest.Mocked<WorkspaceClient>
// Create mock clients with unique properties
mockGmailClients = new Map()
const allUsers = [...workspaceATokens, ...workspaceBTokens].map((token) => token.userId)
allUsers.forEach((userId) => {
mockGmailClients.set(userId, {
startSync: jest.fn().mockResolvedValue(undefined),
sync: jest.fn().mockResolvedValue(undefined),
close: jest.fn().mockResolvedValue(undefined),
getUserEmail: jest.fn().mockReturnValue(`${userId}@example.com`)
} as unknown as jest.Mocked<GmailClient>)
})
// Mock WorkspaceClient.create
jest.spyOn(WorkspaceClient, 'create').mockResolvedValue(mockWorkspaceClient)
// Mock getIntegrations
jest
.spyOn(integrations, 'getIntegrations')
.mockResolvedValue([
{ workspaceUuid: workspaceAId, socialId: 'user1' } as any,
{ workspaceUuid: workspaceAId, socialId: 'user2' } as any,
{ workspaceUuid: workspaceBId, socialId: 'user3' } as any,
{ workspaceUuid: workspaceBId, socialId: 'user4' } as any,
{ workspaceUuid: workspaceBId, socialId: 'user5' } as any
])
// Mock getWorkspaceTokens
jest.spyOn(tokens, 'getWorkspaceTokens').mockImplementation(async (_, workspaceId) => {
if (workspaceId === workspaceAId) return workspaceATokens
if (workspaceId === workspaceBId) return workspaceBTokens
return []
})
// Mock getAccountClient
jest.spyOn(serverClient, 'getAccountClient').mockReturnValue({
getWorkspaceInfo: jest.fn().mockResolvedValue({ mode: 'active' })
} as any)
// Mock serviceToken
// eslint-disable-next-line @typescript-eslint/no-var-requires
jest.spyOn(require('../utils'), 'serviceToken').mockReturnValue('test-token')
// Mock JSON.parse
jest.spyOn(JSON, 'parse').mockReturnValue({
web: { client_id: 'id', client_secret: 'secret', redirect_uris: ['uri'] }
})
// Create GmailController
gmailController = GmailController.create(mockCtx, mockStorageAdapter)
// Mock createGmailClient to return appropriate mock client
mockWorkspaceClient.createGmailClient.mockImplementation(async (token: Token) => {
const client = mockGmailClients.get(token.userId)
if (client == null) {
throw new Error(`No mock client for userId: ${token.userId}`)
}
return client
})
})
it('should create clients for all tokens without duplicates', async () => {
// Execute startAll
await gmailController.startAll()
// Verify workspaces were created
expect(WorkspaceClient.create).toHaveBeenCalledTimes(2)
expect(WorkspaceClient.create).toHaveBeenCalledWith(
expect.anything(),
expect.anything(),
mockStorageAdapter,
workspaceAId
)
expect(WorkspaceClient.create).toHaveBeenCalledWith(
expect.anything(),
expect.anything(),
mockStorageAdapter,
workspaceBId
)
// Verify createGmailClient called 5 times
expect(mockWorkspaceClient.createGmailClient).toHaveBeenCalledTimes(5)
})
})

View File

@ -99,7 +99,7 @@ export class GmailClient {
private socialId: SocialId
) {
this.email = email
this.integrationToken = serviceToken()
this.integrationToken = serviceToken(workspaceId)
this.tokenStorage = new TokenStorage(this.ctx, workspaceId, this.integrationToken)
this.client = new TxOperations(client, this.socialId._id)
this.account = this.user.userId
@ -363,15 +363,11 @@ export class GmailClient {
this.socialId = await getOrCreateSocialId(this.account, this.email)
}
private async getCurrentToken (): Promise<Token | null> {
return await this.tokenStorage.getToken(this.socialId._id)
}
private async addClient (): Promise<void> {
try {
this.ctx.info('Register client', { socialId: this.socialId._id, email: this.email })
const controller = GmailController.getGmailController()
controller.addClient(this.socialId._id, this)
controller.addClient(this.socialId._id, this.user.workspace, this)
} catch (err) {
this.ctx.error('Add client error', {
workspaceUuid: this.user.workspace,

View File

@ -39,7 +39,11 @@ export class GmailController {
private readonly workspaces: Map<string, WorkspaceClient> = new Map<string, WorkspaceClient>()
private readonly credentials: ProjectCredentials
private readonly clients: Map<PersonId, GmailClient[]> = new Map<PersonId, GmailClient[]>()
private readonly clients: Map<PersonId, Map<WorkspaceUuid, GmailClient>> = new Map<
PersonId,
Map<WorkspaceUuid, GmailClient>
>()
private readonly initLimitter = new RateLimiter(config.InitLimit)
private readonly authProvider
@ -75,24 +79,37 @@ export class GmailController {
this.ctx.info('Start integrations', { count: integrations.length })
const limiter = new RateLimiter(config.InitLimit)
for (const integration of integrations) {
if (integration.workspaceUuid === null) continue
const workspaceIds = new Set<WorkspaceUuid>(
integrations
.map((integration) => {
if (integration.workspaceUuid == null) {
this.ctx.info('No workspace found', { integration })
return undefined
}
return integration.workspaceUuid
})
.filter((id): id is WorkspaceUuid => id != null)
)
this.ctx.info('Workspaces with integrations', { count: workspaceIds.size })
for (const workspace of workspaceIds) {
const wsToken = serviceToken(workspace)
const accountClient = getAccountClient(wsToken)
const tokens = await getWorkspaceTokens(accountClient, workspace)
await limiter.add(async () => {
if (integration.workspaceUuid === null) return
const accountClient = getAccountClient(token)
const info = await accountClient.getWorkspaceInfo()
if (info === undefined) {
this.ctx.info('workspace not found', { workspaceUuid: integration.workspaceUuid })
this.ctx.info('workspace not found', { workspaceUuid: workspace })
return
}
if (!isActiveMode(info.mode)) {
this.ctx.info('workspace is not active', { workspaceUuid: integration.workspaceUuid })
this.ctx.info('workspace is not active', { workspaceUuid: workspace })
return
}
const tokens = await getWorkspaceTokens(accountClient, integration.workspaceUuid)
this.ctx.info('Use stored tokens', { count: tokens.length })
const startPromise = this.startWorkspace(integration.workspaceUuid, tokens)
const startPromise = this.startWorkspace(workspace, tokens)
const timeoutPromise = new Promise<void>((resolve) => {
setTimeout(() => {
resolve()
@ -142,27 +159,35 @@ export class GmailController {
const data = JSON.parse(decode64(message))
const email = data.emailAddress
const clients = this.clients.get(email)
for (const client of clients ?? []) {
if (clients === undefined) {
this.ctx.info('No clients found', { email })
return
}
for (const client of clients.values()) {
void client.sync()
}
}
addClient (socialId: PersonId, client: GmailClient): void {
const clients = this.clients.get(socialId)
if (clients === undefined) {
this.clients.set(socialId, [client])
} else {
clients.push(client)
this.clients.set(socialId, clients)
addClient (socialId: PersonId, workspace: WorkspaceUuid, client: GmailClient): void {
let userClients = this.clients.get(socialId)
if (userClients === undefined) {
userClients = new Map<WorkspaceUuid, GmailClient>()
this.clients.set(socialId, userClients)
}
}
/*
async getGmailClient (userId: AccountUuid, workspace: WorkspaceUuid, token: string): Promise<GmailClient> {
const workspaceClient = await this.getWorkspaceClient(workspace)
return await workspaceClient.createGmailClient({ userId, workspace, token })
const existingClient = userClients.get(workspace)
if (existingClient != null) {
void existingClient.close().catch((err) => {
this.ctx.error('Error closing existing client', {
socialId,
workspace,
error: err.message
})
})
}
userClients.set(workspace, client)
}
*/
getAuthProvider (): AuthProvider {
return this.authProvider

View File

@ -14,7 +14,7 @@
// limitations under the License.
//
import { type Data, type Doc, type DocumentUpdate, systemAccountUuid } from '@hcengineering/core'
import { type Data, type Doc, type DocumentUpdate, systemAccountUuid, WorkspaceUuid } from '@hcengineering/core'
import { generateToken } from '@hcengineering/server-token'
import { deepEqual } from 'fast-equals'
import { type KeyValueClient, getClient as getKeyValueClient } from '@hcengineering/kvs-client'
@ -72,8 +72,8 @@ export function addFooter (message: string): string {
return message + config.FooterMessage.trim()
}
export function serviceToken (): string {
return generateToken(systemAccountUuid, undefined, { service: 'gmail' })
export function serviceToken (workspaceId?: WorkspaceUuid): string {
return generateToken(systemAccountUuid, workspaceId, { service: 'gmail' })
}
export async function wait (sec: number): Promise<void> {

View File

@ -55,6 +55,7 @@ Analytics.setTag('application', 'telegram-bot-service')
export async function requestReconnect (bot: Telegraf<TgContext>, limiter: Limiter): Promise<void> {
if (config.MongoDB === '' || config.MongoURL === '') {
ctx.info('MongoDB is not configured, skipping reconnect')
return
}
@ -84,11 +85,15 @@ export const start = async (): Promise<void> => {
setMetadata(serverClient.metadata.UserAgent, config.ServiceId)
registerLoaders()
ctx.info('Creating worker...')
const worker = await PlatformWorker.create(ctx)
ctx.info('Set up bot...')
const bot = await setUpBot(worker)
ctx.info('Creating server...')
const app = createServer(bot, worker, ctx)
ctx.info('Creating queue...')
const queue = getPlatformQueue('telegramBotService', config.QueueRegion)
ctx.info('queue', { clientId: queue.getClientId() })
if (config.Domain === '') {
ctx.info('Starting bot with polling')
@ -110,7 +115,9 @@ export const start = async (): Promise<void> => {
res.status(200).send()
})
ctx.info('Requesting reconnect...')
await requestReconnect(bot, worker.limiter)
ctx.info('Starting server...')
const server = listen(app, ctx, config.Port)
const consumer = queue.createConsumer<TelegramQueueMessage>(

View File

@ -87,7 +87,9 @@ export class PlatformWorker {
const storageConfig: StorageConfiguration = storageConfigFromEnv()
const storage = buildStorageFromConfig(storageConfig)
ctx.info('Connecting to database...', { dbUrl: config.DbUrl })
const db = await getDb()
ctx.info('Database connected')
const limiter = new Limiter()
return new PlatformWorker(ctx, storage, limiter, db)

View File

@ -1,4 +1,4 @@
STORAGE_CONFIG="minio|minio?accessKey=minioadmin&secretKey=minioadmin"
STORAGE_CONFIG="datalake|http://datalake:4031"
BACKUP_STORAGE_CONFIG="minio|minio?accessKey=minioadmin&secretKey=minioadmin"
BACKUP_BUCKET_NAME=dev-backups

View File

@ -88,6 +88,10 @@ services:
ports:
- 9002:9000
- 9003:9001
healthcheck:
test: ["CMD", "mc", "ready", "local"]
interval: 5s
retries: 10
elastic:
image: 'elasticsearch:7.14.2'
expose:
@ -166,7 +170,9 @@ services:
- SERVER_SECRET=secret
- ACCOUNTS_URL_INTERNAL=http://account:3003
- ACCOUNTS_URL=http://localhost:3003
- UPLOAD_URL=/files
- FILES_URL=http://localhost:4031/blob/:workspace/:blobId/:filename
- UPLOAD_URL=http://localhost:4031/upload/form-data/:workspace
- PREVIEW_CONFIG=http://localhost:4031/image/fit=cover,width=:width,height=:height,dpr=:dpr/:workspace/:blobId
- ELASTIC_URL=http://elastic:9200
- GMAIL_URL=http://localhost:8088
- CALENDAR_URL=http://localhost:8095
@ -258,6 +264,28 @@ services:
- REKONI_URL=http://rekoni:4007
- ACCOUNTS_URL=http://account:3003
- STATS_URL=http://stats:4901
datalake:
image: hardcoreeng/datalake
depends_on:
minio:
condition: service_healthy
cockroach:
condition: service_started
stats:
condition: service_started
account:
condition: service_started
ports:
- 4031:4031
environment:
- PORT=4031
- SECRET=secret
- ACCOUNTS_URL=http://account:3003
- STATS_URL=http://stats:4901
- STREAM_URL=http://localhost:1081/recording
- DB_URL=${DB_PG_URL}
- BUCKETS=blobs,eu|http://minio:9000?accessKey=minioadmin&secretKey=minioadmin
restart: unless-stopped
stats:
image: hardcoreeng/stats
ports:

View File

@ -195,9 +195,8 @@ test.describe('Content in the Documents tests', () => {
})
})
test('Check Image views', async ({ page, context }) => {
test('Check Image can be opened in popup', async ({ page, context }) => {
await documentContentPage.addImageToDocument(page)
const imageSrc = await documentContentPage.firstImageInDocument().getAttribute('src')
await test.step('User can open image on current page', async () => {
await documentContentPage.clickImageFullscreenButton()
@ -210,6 +209,12 @@ test.describe('Content in the Documents tests', () => {
await expect(documentContentPage.imageInPopup()).toBeHidden()
}).toPass(retryOptions)
})
})
// TODO this currently fails for datalake because it downloads the image instead of opening it in the new tab
test.skip('Check Image can be opened in new tab', async ({ page, context }) => {
await documentContentPage.addImageToDocument(page)
const imageSrc = await documentContentPage.firstImageInDocument().getAttribute('src')
await test.step('User can open image original in the new tab', async () => {
const pagePromise = context.waitForEvent('page', {