mirror of
https://github.com/hcengineering/platform.git
synced 2025-06-09 09:20:54 +00:00
UBEF-4319: Few more performance fixes (#4613)
Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
parent
19da703193
commit
96dc2f9847
@ -24,8 +24,8 @@ dependencies:
|
||||
specifier: ^3.1.0
|
||||
version: 3.4.3
|
||||
'@playwright/test':
|
||||
specifier: ^1.40.1
|
||||
version: 1.40.1
|
||||
specifier: ^1.41.2
|
||||
version: 1.41.2
|
||||
'@rush-temp/account':
|
||||
specifier: file:./projects/account.tgz
|
||||
version: file:projects/account.tgz(@types/node@20.11.16)(bufferutil@4.0.7)(esbuild@0.16.17)(svelte@4.2.5)(ts-node@10.9.1)
|
||||
@ -3582,12 +3582,12 @@ packages:
|
||||
dev: false
|
||||
optional: true
|
||||
|
||||
/@playwright/test@1.40.1:
|
||||
resolution: {integrity: sha512-EaaawMTOeEItCRvfmkI9v6rBkF1svM8wjl/YPRrg2N2Wmp+4qJYkWtJsbew1szfKKDm6fPLy4YAanBhIlf9dWw==}
|
||||
/@playwright/test@1.41.2:
|
||||
resolution: {integrity: sha512-qQB9h7KbibJzrDpkXkYvsmiDJK14FULCCZgEcoe2AvFAS64oCirWTwzTlAYEbKaRxWs5TFesE1Na6izMv3HfGg==}
|
||||
engines: {node: '>=16'}
|
||||
hasBin: true
|
||||
dependencies:
|
||||
playwright: 1.40.1
|
||||
playwright: 1.41.2
|
||||
dev: false
|
||||
|
||||
/@polka/url@1.0.0-next.23:
|
||||
@ -12980,18 +12980,18 @@ packages:
|
||||
find-up: 6.3.0
|
||||
dev: false
|
||||
|
||||
/playwright-core@1.40.1:
|
||||
resolution: {integrity: sha512-+hkOycxPiV534c4HhpfX6yrlawqVUzITRKwHAmYfmsVreltEl6fAZJ3DPfLMOODw0H3s1Itd6MDCWmP1fl/QvQ==}
|
||||
/playwright-core@1.41.2:
|
||||
resolution: {integrity: sha512-VaTvwCA4Y8kxEe+kfm2+uUUw5Lubf38RxF7FpBxLPmGe5sdNkSg5e3ChEigaGrX7qdqT3pt2m/98LiyvU2x6CA==}
|
||||
engines: {node: '>=16'}
|
||||
hasBin: true
|
||||
dev: false
|
||||
|
||||
/playwright@1.40.1:
|
||||
resolution: {integrity: sha512-2eHI7IioIpQ0bS1Ovg/HszsN/XKNwEG1kbzSDDmADpclKc7CyqkHw7Mg2JCz/bbCxg25QUPcjksoMW7JcIFQmw==}
|
||||
/playwright@1.41.2:
|
||||
resolution: {integrity: sha512-v0bOa6H2GJChDL8pAeLa/LZC4feoAMbSQm1/jF/ySsWWoaNItvrMP7GEkvEEFyCTUYKMxjQKaTSg5up7nR6/8A==}
|
||||
engines: {node: '>=16'}
|
||||
hasBin: true
|
||||
dependencies:
|
||||
playwright-core: 1.40.1
|
||||
playwright-core: 1.41.2
|
||||
optionalDependencies:
|
||||
fsevents: 2.3.2
|
||||
dev: false
|
||||
@ -22654,12 +22654,12 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/tests-sanity.tgz(svelte@4.2.5):
|
||||
resolution: {integrity: sha512-kwOYLXjeTn+4Lr7IdpsgiNRrlIEJ0ZzY3DIFLJUTFeoTaVrYR3yZmGJjzS9rJMxSYFL6zKGPkTCwju94g4AXFw==, tarball: file:projects/tests-sanity.tgz}
|
||||
resolution: {integrity: sha512-jRAMJfzX45PJrXgNSbz7okYMeLRbdgqbOjCW71E0dtMthnBVqpVL2+VC/f+R2OdctkScogn1d07cfV7Irkds7A==, tarball: file:projects/tests-sanity.tgz}
|
||||
id: file:projects/tests-sanity.tgz
|
||||
name: '@rush-temp/tests-sanity'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
'@playwright/test': 1.40.1
|
||||
'@playwright/test': 1.41.2
|
||||
'@types/jest': 29.5.5
|
||||
'@types/node': 20.11.16
|
||||
'@typescript-eslint/eslint-plugin': 6.11.0(@typescript-eslint/parser@6.11.0)(eslint@8.54.0)(typescript@5.3.3)
|
||||
|
@ -19,6 +19,7 @@ import type {
|
||||
DocumentQuery,
|
||||
FindOptions,
|
||||
FindResult,
|
||||
MeasureContext,
|
||||
Ref,
|
||||
Tx,
|
||||
TxResult,
|
||||
@ -37,6 +38,7 @@ class InMemoryTxAdapter extends DummyDbAdapter implements TxAdapter {
|
||||
}
|
||||
|
||||
async findAll<T extends Doc>(
|
||||
ctx: MeasureContext,
|
||||
_class: Ref<Class<T>>,
|
||||
query: DocumentQuery<T>,
|
||||
options?: FindOptions<T>
|
||||
@ -44,7 +46,7 @@ class InMemoryTxAdapter extends DummyDbAdapter implements TxAdapter {
|
||||
return await this.txdb.findAll(_class, query, options)
|
||||
}
|
||||
|
||||
async tx (...tx: Tx[]): Promise<TxResult[]> {
|
||||
async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
|
||||
const r: TxResult[] = []
|
||||
for (const t of tx) {
|
||||
r.push(await this.txdb.tx(t))
|
||||
|
@ -399,7 +399,6 @@ class Connection implements ClientConnection {
|
||||
method: 'measure-done',
|
||||
params: [operationName, mid]
|
||||
})
|
||||
|
||||
return {
|
||||
time: Date.now() - dateNow,
|
||||
serverTime
|
||||
@ -415,12 +414,17 @@ class Connection implements ClientConnection {
|
||||
return await this.sendRequest({ method: 'getAccount', params: [] })
|
||||
}
|
||||
|
||||
findAll<T extends Doc>(
|
||||
async findAll<T extends Doc>(
|
||||
_class: Ref<Class<T>>,
|
||||
query: DocumentQuery<T>,
|
||||
options?: FindOptions<T>
|
||||
): Promise<FindResult<T>> {
|
||||
return this.sendRequest({ method: 'findAll', params: [_class, query, options] })
|
||||
const st = Date.now()
|
||||
const result = await this.sendRequest({ method: 'findAll', params: [_class, query, options] })
|
||||
if (Date.now() - st > 1000) {
|
||||
console.error('measure slow findAll', Date.now() - st, _class, query, options, result)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
tx (tx: Tx): Promise<TxResult> {
|
||||
|
@ -127,7 +127,7 @@
|
||||
handleRowFocused(focusDoc)
|
||||
}
|
||||
},
|
||||
{ ...resultOptions, limit: limit ?? 200 }
|
||||
{ ...resultOptions, limit: Math.min(limit ?? 200, 200) }
|
||||
)
|
||||
})
|
||||
} else {
|
||||
@ -143,7 +143,7 @@
|
||||
const dispatch = createEventDispatcher()
|
||||
|
||||
function limitGroup (items: Doc[], limit: number | undefined): Doc[] {
|
||||
const res = limit !== undefined ? items.slice(0, limit) : items
|
||||
const res = limit !== undefined ? items.slice(0, Math.min(limit, 200)) : items
|
||||
return res
|
||||
}
|
||||
|
||||
|
@ -23,11 +23,11 @@ import core, {
|
||||
IndexStageState,
|
||||
MeasureContext,
|
||||
Ref,
|
||||
Storage,
|
||||
WorkspaceId
|
||||
} from '@hcengineering/core'
|
||||
import {
|
||||
contentStageId,
|
||||
DbAdapter,
|
||||
docKey,
|
||||
DocUpdateHandler,
|
||||
fieldStateId,
|
||||
@ -40,12 +40,10 @@ import {
|
||||
loadIndexStageStage,
|
||||
RateLimitter
|
||||
} from '@hcengineering/server-core'
|
||||
|
||||
import got from 'got'
|
||||
|
||||
import { chunks } from './encoder/encoder'
|
||||
import openaiPlugin, { openAIRatelimitter } from './plugin'
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
@ -118,10 +116,10 @@ export class OpenAIEmbeddingsStage implements FullTextPipelineStage {
|
||||
})
|
||||
}
|
||||
|
||||
async initialize (storage: Storage, pipeline: FullTextPipeline): Promise<void> {
|
||||
async initialize (ctx: MeasureContext, storage: DbAdapter, pipeline: FullTextPipeline): Promise<void> {
|
||||
try {
|
||||
// Just do nothing
|
||||
const config = await storage.findAll(openaiPlugin.class.OpenAIConfiguration, {})
|
||||
const config = await storage.findAll(ctx, openaiPlugin.class.OpenAIConfiguration, {})
|
||||
let needCheck = 0
|
||||
if (config.length > 0) {
|
||||
if (this.enabled !== config[0].embeddings) {
|
||||
@ -159,14 +157,21 @@ export class OpenAIEmbeddingsStage implements FullTextPipelineStage {
|
||||
this.enabled = false
|
||||
}
|
||||
|
||||
;[this.stageValue, this.indexState] = await loadIndexStageStage(storage, this.indexState, this.stageId, 'config', {
|
||||
enabled: this.enabled,
|
||||
endpoint: this.endpoint,
|
||||
field: this.field,
|
||||
mode: this.model,
|
||||
copyToState: this.copyToState,
|
||||
stripNewLines: true
|
||||
})
|
||||
;[this.stageValue, this.indexState] = await loadIndexStageStage(
|
||||
ctx,
|
||||
storage,
|
||||
this.indexState,
|
||||
this.stageId,
|
||||
'config',
|
||||
{
|
||||
enabled: this.enabled,
|
||||
endpoint: this.endpoint,
|
||||
field: this.field,
|
||||
mode: this.model,
|
||||
copyToState: this.copyToState,
|
||||
stripNewLines: true
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async getEmbedding (text: string): Promise<OpenAIEmbeddingResponse> {
|
||||
|
@ -23,6 +23,7 @@ import {
|
||||
FindResult,
|
||||
Hierarchy,
|
||||
IndexingConfiguration,
|
||||
MeasureContext,
|
||||
ModelDb,
|
||||
Ref,
|
||||
StorageIterator,
|
||||
@ -47,13 +48,14 @@ export interface DbAdapter {
|
||||
|
||||
close: () => Promise<void>
|
||||
findAll: <T extends Doc>(
|
||||
ctx: MeasureContext,
|
||||
_class: Ref<Class<T>>,
|
||||
query: DocumentQuery<T>,
|
||||
options?: FindOptions<T> & {
|
||||
domain?: Domain // Allow to find for Doc's in specified domain only.
|
||||
}
|
||||
) => Promise<FindResult<T>>
|
||||
tx: (...tx: Tx[]) => Promise<TxResult[]>
|
||||
tx: (ctx: MeasureContext, ...tx: Tx[]) => Promise<TxResult[]>
|
||||
|
||||
find: (domain: Domain) => StorageIterator
|
||||
|
||||
@ -97,6 +99,7 @@ export interface DbAdapterConfiguration {
|
||||
export class DummyDbAdapter implements DbAdapter {
|
||||
async init (model: Tx[]): Promise<void> {}
|
||||
async findAll<T extends Doc>(
|
||||
ctx: MeasureContext,
|
||||
_class: Ref<Class<T>>,
|
||||
query: DocumentQuery<T>,
|
||||
options?: FindOptions<T> | undefined
|
||||
@ -107,7 +110,7 @@ export class DummyDbAdapter implements DbAdapter {
|
||||
async createIndexes (domain: Domain, config: Pick<IndexingConfiguration<Doc>, 'indexes'>): Promise<void> {}
|
||||
async removeOldIndex (domain: Domain, deletePattern: RegExp, keepPattern: RegExp): Promise<void> {}
|
||||
|
||||
async tx (...tx: Tx[]): Promise<TxResult[]> {
|
||||
async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
|
||||
return []
|
||||
}
|
||||
|
||||
@ -140,6 +143,7 @@ class InMemoryAdapter extends DummyDbAdapter implements DbAdapter {
|
||||
}
|
||||
|
||||
async findAll<T extends Doc>(
|
||||
ctx: MeasureContext,
|
||||
_class: Ref<Class<T>>,
|
||||
query: DocumentQuery<T>,
|
||||
options?: FindOptions<T>
|
||||
@ -147,7 +151,7 @@ class InMemoryAdapter extends DummyDbAdapter implements DbAdapter {
|
||||
return await this.modeldb.findAll(_class, query, options)
|
||||
}
|
||||
|
||||
async tx (...tx: Tx[]): Promise<TxResult[]> {
|
||||
async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
|
||||
return await this.modeldb.tx(...tx)
|
||||
}
|
||||
|
||||
|
@ -21,12 +21,12 @@ import core, {
|
||||
DocumentUpdate,
|
||||
MeasureContext,
|
||||
Ref,
|
||||
Storage,
|
||||
WorkspaceId
|
||||
} from '@hcengineering/core'
|
||||
import { MinioService } from '@hcengineering/minio'
|
||||
import { DbAdapter } from '../adapter'
|
||||
import { ContentTextAdapter, IndexedDoc } from '../types'
|
||||
import { contentStageId, DocUpdateHandler, fieldStateId, FullTextPipeline, FullTextPipelineStage } from './types'
|
||||
import { DocUpdateHandler, FullTextPipeline, FullTextPipelineStage, contentStageId, fieldStateId } from './types'
|
||||
import { docKey, docUpdKey, getFullTextIndexableAttributes } from './utils'
|
||||
|
||||
/**
|
||||
@ -57,7 +57,7 @@ export class ContentRetrievalStage implements FullTextPipelineStage {
|
||||
private readonly contentAdapter: ContentTextAdapter
|
||||
) {}
|
||||
|
||||
async initialize (storage: Storage, pipeline: FullTextPipeline): Promise<void> {
|
||||
async initialize (ctx: MeasureContext, storage: DbAdapter, pipeline: FullTextPipeline): Promise<void> {
|
||||
// Just do nothing
|
||||
}
|
||||
|
||||
|
@ -23,10 +23,10 @@ import core, {
|
||||
IndexStageState,
|
||||
MeasureContext,
|
||||
Ref,
|
||||
ServerStorage,
|
||||
Storage
|
||||
ServerStorage
|
||||
} from '@hcengineering/core'
|
||||
import { deepEqual } from 'fast-equals'
|
||||
import { DbAdapter } from '../adapter'
|
||||
import { IndexedDoc } from '../types'
|
||||
import { contentStageId, DocUpdateHandler, fieldStateId, FullTextPipeline, FullTextPipelineStage } from './types'
|
||||
import {
|
||||
@ -34,11 +34,11 @@ import {
|
||||
docKey,
|
||||
docUpdKey,
|
||||
getContent,
|
||||
getFullTextIndexableAttributes,
|
||||
getCustomAttrKeys,
|
||||
getFullTextContext,
|
||||
getFullTextIndexableAttributes,
|
||||
isFullTextAttribute,
|
||||
loadIndexStageStage,
|
||||
getCustomAttrKeys
|
||||
loadIndexStageStage
|
||||
} from './utils'
|
||||
|
||||
/**
|
||||
@ -62,7 +62,7 @@ export class IndexedFieldStage implements FullTextPipelineStage {
|
||||
|
||||
constructor (private readonly dbStorage: ServerStorage) {}
|
||||
|
||||
async initialize (storage: Storage, pipeline: FullTextPipeline): Promise<void> {
|
||||
async initialize (ctx: MeasureContext, storage: DbAdapter, pipeline: FullTextPipeline): Promise<void> {
|
||||
const indexablePropogate = (
|
||||
await pipeline.model.findAll(core.class.Class, {
|
||||
[core.mixin.FullTextSearchContext]: { $exists: true }
|
||||
@ -83,10 +83,17 @@ export class IndexedFieldStage implements FullTextPipelineStage {
|
||||
).map((it) => it._id)
|
||||
|
||||
indexablePropogate.sort()
|
||||
;[this.stageValue, this.indexState] = await loadIndexStageStage(storage, this.indexState, this.stageId, 'config', {
|
||||
classes: indexablePropogate,
|
||||
forceIndex: forceIndexing
|
||||
})
|
||||
;[this.stageValue, this.indexState] = await loadIndexStageStage(
|
||||
ctx,
|
||||
storage,
|
||||
this.indexState,
|
||||
this.stageId,
|
||||
'config',
|
||||
{
|
||||
classes: indexablePropogate,
|
||||
forceIndex: forceIndexing
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async search (
|
||||
|
@ -27,9 +27,10 @@ import core, {
|
||||
MeasureContext,
|
||||
Ref,
|
||||
ServerStorage,
|
||||
Storage,
|
||||
WorkspaceId
|
||||
} from '@hcengineering/core'
|
||||
import { DbAdapter } from '../adapter'
|
||||
import { updateDocWithPresenter } from '../mapper'
|
||||
import { FullTextAdapter, IndexedDoc } from '../types'
|
||||
import { summaryStageId } from './summary'
|
||||
import {
|
||||
@ -48,7 +49,6 @@ import {
|
||||
IndexKeyOptions,
|
||||
isCustomAttr
|
||||
} from './utils'
|
||||
import { updateDocWithPresenter } from '../mapper'
|
||||
|
||||
/**
|
||||
* @public
|
||||
@ -75,7 +75,7 @@ export class FullTextPushStage implements FullTextPipelineStage {
|
||||
readonly workspace: WorkspaceId
|
||||
) {}
|
||||
|
||||
async initialize (storage: Storage, pipeline: FullTextPipeline): Promise<void> {
|
||||
async initialize (ctx: MeasureContext, storage: DbAdapter, pipeline: FullTextPipeline): Promise<void> {
|
||||
// Just do nothing
|
||||
try {
|
||||
const r = await this.fulltextAdapter.initMapping()
|
||||
|
@ -108,6 +108,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
|
||||
async markRemove (doc: DocIndexState): Promise<void> {
|
||||
const ops = new TxFactory(core.account.System, true)
|
||||
await this.storage.tx(
|
||||
this.metrics,
|
||||
ops.createTxUpdateDoc(doc._class, doc.space, doc._id, {
|
||||
removed: true
|
||||
})
|
||||
@ -122,7 +123,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
|
||||
): Promise<{ docs: IndexedDoc[], pass: boolean }> {
|
||||
const result: IndexedDoc[] = []
|
||||
for (const st of this.stages) {
|
||||
await st.initialize(this.storage, this)
|
||||
await st.initialize(this.metrics, this.storage, this)
|
||||
const docs = await st.search(_classes, search, size, from)
|
||||
result.push(...docs.docs)
|
||||
if (!docs.pass) {
|
||||
@ -279,7 +280,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
|
||||
|
||||
async initializeStages (): Promise<void> {
|
||||
for (const st of this.stages) {
|
||||
await st.initialize(this.storage, this)
|
||||
await st.initialize(this.metrics, this.storage, this)
|
||||
}
|
||||
}
|
||||
|
||||
@ -401,8 +402,9 @@ export class FullTextIndexPipeline implements FullTextPipeline {
|
||||
let result = await ctx.with(
|
||||
'get-to-index',
|
||||
{},
|
||||
async () =>
|
||||
async (ctx) =>
|
||||
await this.storage.findAll(
|
||||
ctx,
|
||||
core.class.DocIndexState,
|
||||
{
|
||||
[`stages.${st.stageId}`]: { $ne: st.stageValue },
|
||||
@ -509,6 +511,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
|
||||
private async processRemove (): Promise<void> {
|
||||
while (true) {
|
||||
const result = await this.storage.findAll(
|
||||
this.metrics,
|
||||
core.class.DocIndexState,
|
||||
{
|
||||
removed: true
|
||||
@ -641,6 +644,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
|
||||
|
||||
const states = (
|
||||
await this.storage.findAll(
|
||||
ctx,
|
||||
core.class.DocIndexState,
|
||||
{
|
||||
objectClass: c,
|
||||
@ -697,6 +701,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
|
||||
// remove index states for documents that do not exist
|
||||
const toRemove = (
|
||||
await this.storage.findAll(
|
||||
ctx,
|
||||
core.class.DocIndexState,
|
||||
{ objectClass: c, generationId: { $ne: generationId } },
|
||||
{ projection: { _id: 1 } }
|
||||
@ -713,6 +718,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
|
||||
|
||||
while (true) {
|
||||
const docRefs = await this.storage.findAll(
|
||||
ctx,
|
||||
core.class.DocIndexState,
|
||||
{ objectClass: { $nin: allClasses } },
|
||||
{ projection: { _id: 1, objectClass: 1 }, limit: 10000 }
|
||||
|
@ -26,19 +26,19 @@ import core, {
|
||||
isFullTextAttribute,
|
||||
MeasureContext,
|
||||
Ref,
|
||||
ServerStorage,
|
||||
Storage
|
||||
ServerStorage
|
||||
} from '@hcengineering/core'
|
||||
import { translate } from '@hcengineering/platform'
|
||||
import { convert } from 'html-to-text'
|
||||
import { DbAdapter } from '../adapter'
|
||||
import { IndexedDoc } from '../types'
|
||||
import { contentStageId, DocUpdateHandler, fieldStateId, FullTextPipeline, FullTextPipelineStage } from './types'
|
||||
import {
|
||||
collectPropagate,
|
||||
collectPropagateClasses,
|
||||
getFullTextContext,
|
||||
loadIndexStageStage,
|
||||
isCustomAttr
|
||||
isCustomAttr,
|
||||
loadIndexStageStage
|
||||
} from './utils'
|
||||
|
||||
/**
|
||||
@ -73,7 +73,7 @@ export class FullSummaryStage implements FullTextPipelineStage {
|
||||
|
||||
constructor (private readonly dbStorage: ServerStorage) {}
|
||||
|
||||
async initialize (storage: Storage, pipeline: FullTextPipeline): Promise<void> {
|
||||
async initialize (ctx: MeasureContext, storage: DbAdapter, pipeline: FullTextPipeline): Promise<void> {
|
||||
const indexable = (
|
||||
await pipeline.model.findAll(core.class.Class, { [core.mixin.FullTextSearchContext]: { $exists: true } })
|
||||
)
|
||||
@ -81,10 +81,17 @@ export class FullSummaryStage implements FullTextPipelineStage {
|
||||
.filter((it) => it.fullTextSummary)
|
||||
.map((it) => it._id + (it.propagateClasses ?? []).join('|'))
|
||||
indexable.sort()
|
||||
;[this.stageValue, this.indexState] = await loadIndexStageStage(storage, this.indexState, this.stageId, 'config', {
|
||||
classes: indexable,
|
||||
matchExtra: this.matchExtra
|
||||
})
|
||||
;[this.stageValue, this.indexState] = await loadIndexStageStage(
|
||||
ctx,
|
||||
storage,
|
||||
this.indexState,
|
||||
this.stageId,
|
||||
'config',
|
||||
{
|
||||
classes: indexable,
|
||||
matchExtra: this.matchExtra
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async search (
|
||||
|
@ -22,9 +22,9 @@ import {
|
||||
Hierarchy,
|
||||
MeasureContext,
|
||||
ModelDb,
|
||||
Ref,
|
||||
Storage
|
||||
Ref
|
||||
} from '@hcengineering/core'
|
||||
import { DbAdapter } from '../adapter'
|
||||
import type { IndexedDoc } from '../types'
|
||||
|
||||
/**
|
||||
@ -78,7 +78,7 @@ export interface FullTextPipelineStage {
|
||||
|
||||
stageValue: boolean | string
|
||||
|
||||
initialize: (storage: Storage, pipeline: FullTextPipeline) => Promise<void>
|
||||
initialize: (ctx: MeasureContext, storage: DbAdapter, pipeline: FullTextPipeline) => Promise<void>
|
||||
|
||||
// Collect all changes related to bulk of document states
|
||||
collect: (docs: DocIndexState[], pipeline: FullTextPipeline, ctx: MeasureContext) => Promise<void>
|
||||
|
@ -34,13 +34,14 @@ import core, {
|
||||
IndexStageState,
|
||||
isFullTextAttribute,
|
||||
isIndexedAttribute,
|
||||
MeasureContext,
|
||||
Obj,
|
||||
Ref,
|
||||
Space,
|
||||
Storage,
|
||||
TxFactory
|
||||
} from '@hcengineering/core'
|
||||
import { deepEqual } from 'fast-equals'
|
||||
import { DbAdapter } from '../adapter'
|
||||
import plugin from '../plugin'
|
||||
import { FullTextPipeline } from './types'
|
||||
/**
|
||||
@ -175,14 +176,15 @@ export function createStateDoc (
|
||||
* @public
|
||||
*/
|
||||
export async function loadIndexStageStage (
|
||||
storage: Storage,
|
||||
ctx: MeasureContext,
|
||||
storage: DbAdapter,
|
||||
state: IndexStageState | undefined,
|
||||
stageId: string,
|
||||
field: string,
|
||||
newValue: any
|
||||
): Promise<[boolean | string, IndexStageState]> {
|
||||
if (state === undefined) {
|
||||
;[state] = await storage.findAll(core.class.IndexStageState, { stageId })
|
||||
;[state] = await storage.findAll(ctx, core.class.IndexStageState, { stageId })
|
||||
}
|
||||
const attributes: Record<string, any> = state?.attributes ?? {}
|
||||
|
||||
@ -203,7 +205,7 @@ export async function loadIndexStageStage (
|
||||
}
|
||||
if (state === undefined) {
|
||||
const id: Ref<IndexStageState> = generateId()
|
||||
await storage.tx(ops.createTxCreateDoc(core.class.IndexStageState, plugin.space.DocIndexState, data, id))
|
||||
await storage.tx(ctx, ops.createTxCreateDoc(core.class.IndexStageState, plugin.space.DocIndexState, data, id))
|
||||
state = {
|
||||
...data,
|
||||
_class: core.class.IndexStageState,
|
||||
@ -213,7 +215,10 @@ export async function loadIndexStageStage (
|
||||
modifiedOn: Date.now()
|
||||
}
|
||||
} else {
|
||||
await storage.tx(ops.createTxUpdateDoc(core.class.IndexStageState, plugin.space.DocIndexState, state._id, data))
|
||||
await storage.tx(
|
||||
ctx,
|
||||
ops.createTxUpdateDoc(core.class.IndexStageState, plugin.space.DocIndexState, state._id, data)
|
||||
)
|
||||
state = { ...state, ...data, modifiedOn: Date.now() }
|
||||
}
|
||||
}
|
||||
|
@ -210,7 +210,7 @@ class TServerStorage implements ServerStorage {
|
||||
}
|
||||
}
|
||||
|
||||
const r = await ctx.with('adapter-tx', { domain: lastDomain }, async () => await adapter.tx(...part))
|
||||
const r = await ctx.with('adapter-tx', { domain: lastDomain }, async (ctx) => await adapter.tx(ctx, ...part))
|
||||
|
||||
// Update server live queries.
|
||||
for (const t of part) {
|
||||
@ -419,7 +419,9 @@ class TServerStorage implements ServerStorage {
|
||||
return await ctx.with(
|
||||
p + '-find-all',
|
||||
{ _class: clazz },
|
||||
() => this.getAdapter(domain).findAll(clazz, query, options),
|
||||
(ctx) => {
|
||||
return this.getAdapter(domain).findAll(ctx, clazz, query, options)
|
||||
},
|
||||
{ clazz, query, options }
|
||||
)
|
||||
}
|
||||
@ -791,7 +793,7 @@ class TServerStorage implements ServerStorage {
|
||||
await this.triggers.tx(tx)
|
||||
await this.modelDb.tx(tx)
|
||||
}
|
||||
await ctx.with('domain-tx', {}, async () => await this.getAdapter(DOMAIN_TX).tx(...txToStore))
|
||||
await ctx.with('domain-tx', {}, async (ctx) => await this.getAdapter(DOMAIN_TX).tx(ctx, ...txToStore))
|
||||
result.push(...(await ctx.with('apply', {}, (ctx) => this.routeTx(ctx, removedMap, ...txToProcess))))
|
||||
|
||||
// invoke triggers and store derived objects
|
||||
|
@ -26,6 +26,7 @@ import core, {
|
||||
FullTextData,
|
||||
Hierarchy,
|
||||
IndexingConfiguration,
|
||||
MeasureContext,
|
||||
Ref,
|
||||
Space,
|
||||
StorageIterator,
|
||||
@ -45,6 +46,7 @@ class ElasticDataAdapter implements DbAdapter {
|
||||
) {}
|
||||
|
||||
async findAll<T extends Doc>(
|
||||
ctx: MeasureContext,
|
||||
_class: Ref<Class<T>>,
|
||||
query: DocumentQuery<T>,
|
||||
options?: FindOptions<T>
|
||||
@ -52,7 +54,7 @@ class ElasticDataAdapter implements DbAdapter {
|
||||
return Object.assign([], { total: 0 })
|
||||
}
|
||||
|
||||
async tx (...tx: Tx[]): Promise<TxResult[]> {
|
||||
async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
|
||||
return []
|
||||
}
|
||||
|
||||
|
@ -116,11 +116,12 @@ describe('mongo operations', () => {
|
||||
await model.tx(t)
|
||||
}
|
||||
|
||||
const mctx = new MeasureMetricsContext('', {})
|
||||
const txStorage = await createMongoTxAdapter(hierarchy, mongodbUri, getWorkspaceId(dbId, ''), model)
|
||||
|
||||
// Put all transactions to Tx
|
||||
for (const t of txes) {
|
||||
await txStorage.tx(t)
|
||||
await txStorage.tx(mctx, t)
|
||||
}
|
||||
|
||||
const conf: DbConfiguration = {
|
||||
|
@ -16,6 +16,7 @@
|
||||
import core, {
|
||||
DOMAIN_MODEL,
|
||||
DOMAIN_TX,
|
||||
MeasureMetricsContext,
|
||||
SortingOrder,
|
||||
TxProcessor,
|
||||
cutObjectArray,
|
||||
@ -36,6 +37,7 @@ import core, {
|
||||
type Hierarchy,
|
||||
type IndexingConfiguration,
|
||||
type Lookup,
|
||||
type MeasureContext,
|
||||
type Mixin,
|
||||
type ModelDb,
|
||||
type Projection,
|
||||
@ -110,12 +112,12 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
|
||||
async init (): Promise<void> {}
|
||||
|
||||
async toArray<T>(cursor: AbstractCursor<T>): Promise<T[]> {
|
||||
async toArray<T>(ctx: MeasureContext, cursor: AbstractCursor<T>, limit?: number): Promise<T[]> {
|
||||
const data: T[] = []
|
||||
for await (const r of cursor.stream()) {
|
||||
data.push(r)
|
||||
}
|
||||
await cursor.close()
|
||||
void cursor.close()
|
||||
return data
|
||||
}
|
||||
|
||||
@ -144,7 +146,7 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
}
|
||||
}
|
||||
|
||||
async tx (...tx: Tx[]): Promise<TxResult[]> {
|
||||
async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
|
||||
return []
|
||||
}
|
||||
|
||||
@ -176,7 +178,7 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
translated._class = { $in: classes }
|
||||
} else if (typeof translated._class === 'string') {
|
||||
if (!classes.includes(translated._class)) {
|
||||
translated._class = { $in: classes }
|
||||
translated._class = { $in: classes.filter((it) => !this.hierarchy.isMixin(it)) }
|
||||
}
|
||||
} else if (typeof translated._class === 'object' && translated._class !== null) {
|
||||
let descendants: Ref<Class<Doc>>[] = classes
|
||||
@ -191,7 +193,7 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
descendants = descendants.filter((c) => !excludedClassesIds.has(c))
|
||||
}
|
||||
|
||||
translated._class = { $in: descendants }
|
||||
translated._class = { $in: descendants.filter((it: any) => !this.hierarchy.isMixin(it as Ref<Class<Doc>>)) }
|
||||
}
|
||||
|
||||
if (baseClass !== clazz) {
|
||||
@ -204,6 +206,9 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
delete translated._class
|
||||
}
|
||||
}
|
||||
if (translated._class?.$in?.length === 1 && translated._class?.$nin === undefined) {
|
||||
translated._class = translated._class.$in[0]
|
||||
}
|
||||
return translated
|
||||
}
|
||||
|
||||
@ -329,6 +334,7 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
}
|
||||
|
||||
private async fillLookupValue<T extends Doc>(
|
||||
ctx: MeasureContext,
|
||||
clazz: Ref<Class<T>>,
|
||||
lookup: Lookup<T> | undefined,
|
||||
object: any,
|
||||
@ -348,7 +354,7 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
if (Array.isArray(value)) {
|
||||
const [_class, nested] = value
|
||||
await this.fillLookup(_class, object, key, fullKey, targetObject)
|
||||
await this.fillLookupValue(_class, nested, object, fullKey, targetObject.$lookup[key])
|
||||
await this.fillLookupValue(ctx, _class, nested, object, fullKey, targetObject.$lookup[key])
|
||||
} else {
|
||||
await this.fillLookup(value, object, key, fullKey, targetObject)
|
||||
}
|
||||
@ -421,6 +427,7 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
}
|
||||
|
||||
private async findWithPipeline<T extends Doc>(
|
||||
ctx: MeasureContext,
|
||||
clazz: Ref<Class<T>>,
|
||||
query: DocumentQuery<T>,
|
||||
options?: FindOptions<T> & {
|
||||
@ -472,7 +479,10 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
const result: WithLookup<T>[] = []
|
||||
let total = options?.total === true ? 0 : -1
|
||||
try {
|
||||
const rres = await this.toArray(cursor)
|
||||
const rres = await ctx.with('toArray', {}, async (ctx) => await this.toArray(ctx, cursor, options?.limit), {
|
||||
domain,
|
||||
pipeline
|
||||
})
|
||||
for (const r of rres) {
|
||||
result.push(...r.results)
|
||||
total = options?.total === true ? r.totalCount?.shift()?.count ?? 0 : -1
|
||||
@ -482,7 +492,9 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
throw e
|
||||
}
|
||||
for (const row of result) {
|
||||
await this.fillLookupValue(clazz, options?.lookup, row)
|
||||
await ctx.with('fill-lookup', {}, async (ctx) => {
|
||||
await this.fillLookupValue(ctx, clazz, options?.lookup, row)
|
||||
})
|
||||
this.clearExtraLookups(row)
|
||||
}
|
||||
return toFindResult(this.stripHash(result), total)
|
||||
@ -570,44 +582,62 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
}
|
||||
|
||||
async findAll<T extends Doc>(
|
||||
ctx: MeasureContext,
|
||||
_class: Ref<Class<T>>,
|
||||
query: DocumentQuery<T>,
|
||||
options?: FindOptions<T> & {
|
||||
domain?: Domain // Allow to find for Doc's in specified domain only.
|
||||
}
|
||||
): Promise<FindResult<T>> {
|
||||
// TODO: rework this
|
||||
if (options != null && (options?.lookup != null || this.isEnumSort(_class, options) || this.isRulesSort(options))) {
|
||||
return await this.findWithPipeline(_class, query, options)
|
||||
return await ctx.with('pipeline', {}, async (ctx) => await this.findWithPipeline(ctx, _class, query, options))
|
||||
}
|
||||
const domain = options?.domain ?? this.hierarchy.getDomain(_class)
|
||||
const coll = this.db.collection(domain)
|
||||
const mongoQuery = this.translateQuery(_class, query)
|
||||
|
||||
// We have limit 1 or _id === exact id
|
||||
if (options?.limit === 1 || typeof query._id === 'string') {
|
||||
const data = await ctx.with(
|
||||
'find-one',
|
||||
{ _class },
|
||||
async () =>
|
||||
await coll.findOne<T>(mongoQuery, {
|
||||
checkKeys: false,
|
||||
enableUtf8Validation: false,
|
||||
projection: this.calcProjection(options, _class),
|
||||
sort: this.collectSort<T>(options, _class)
|
||||
}),
|
||||
{
|
||||
_class,
|
||||
mongoQuery,
|
||||
domain
|
||||
}
|
||||
)
|
||||
if (data != null) {
|
||||
return toFindResult(this.stripHash([data]), 1)
|
||||
}
|
||||
return toFindResult([], 0)
|
||||
}
|
||||
|
||||
let cursor = coll.find<T>(mongoQuery, {
|
||||
checkKeys: false,
|
||||
enableUtf8Validation: false
|
||||
})
|
||||
|
||||
if (options?.projection !== undefined) {
|
||||
const projection: Projection<T> = {}
|
||||
for (const key in options.projection) {
|
||||
const ckey = this.checkMixinKey<T>(key, _class) as keyof T
|
||||
projection[ckey] = options.projection[key]
|
||||
const projection = this.calcProjection<T>(options, _class)
|
||||
if (projection != null) {
|
||||
cursor = cursor.project(projection)
|
||||
}
|
||||
cursor = cursor.project(projection)
|
||||
} else {
|
||||
cursor = cursor.project({ '%hash%': 0 })
|
||||
}
|
||||
let total: number = -1
|
||||
if (options !== null && options !== undefined) {
|
||||
if (options.sort !== undefined) {
|
||||
const sort: Sort = {}
|
||||
for (const key in options.sort) {
|
||||
const ckey = this.checkMixinKey<T>(key, _class)
|
||||
const order = options.sort[key] === SortingOrder.Ascending ? 1 : -1
|
||||
sort[ckey] = order
|
||||
const sort = this.collectSort<T>(options, _class)
|
||||
if (sort !== undefined) {
|
||||
cursor = cursor.sort(sort)
|
||||
}
|
||||
cursor = cursor.sort(sort)
|
||||
}
|
||||
if (options.limit !== undefined) {
|
||||
if (options.total === true) {
|
||||
@ -619,7 +649,11 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
|
||||
// Error in case of timeout
|
||||
try {
|
||||
const res: T[] = await this.toArray(cursor)
|
||||
const res: T[] = await ctx.with('toArray', {}, async (ctx) => await this.toArray(ctx, cursor, options?.limit), {
|
||||
mongoQuery,
|
||||
options,
|
||||
domain
|
||||
})
|
||||
if (options?.total === true && options?.limit === undefined) {
|
||||
total = res.length
|
||||
}
|
||||
@ -630,6 +664,55 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Translate the `sort` section of find options into a Mongo `Sort` document.
 *
 * Each requested key is remapped through `checkMixinKey` (mixin attribute
 * keys are stored under a different path in Mongo), and the sorting order is
 * converted to Mongo's numeric form (1 ascending, -1 descending).
 *
 * @returns the Mongo sort specification, or `undefined` when no sorting was
 *          requested or the requested sort object has no keys — so the caller
 *          can skip `cursor.sort()` entirely.
 */
private collectSort<T extends Doc>(
  options:
    | (FindOptions<T> & {
        domain?: Domain | undefined // Allow to find for Doc's in specified domain only.
      })
    | undefined,
  _class: Ref<Class<T>>
): Sort | undefined {
  const requested = options?.sort
  if (requested === undefined) {
    return undefined
  }
  const result: Sort = {}
  let hasEntries = false
  for (const key in requested) {
    // Remap mixin attribute keys to their storage representation.
    const mappedKey = this.checkMixinKey<T>(key, _class)
    result[mappedKey] = requested[key] === SortingOrder.Ascending ? 1 : -1
    hasEntries = true
  }
  // An empty sort object would still trigger a useless cursor.sort() call.
  return hasEntries ? result : undefined
}
|
||||
|
||||
/**
 * Translate the `projection` section of find options into a Mongo
 * `Projection` document.
 *
 * Each requested key is remapped through `checkMixinKey` (mixin attribute
 * keys are stored under a different path in Mongo) while its include/exclude
 * value is copied through unchanged.
 *
 * @returns the Mongo projection, or `undefined` when no projection was
 *          requested or the requested projection has no keys — so the caller
 *          can skip `cursor.project()` entirely.
 */
private calcProjection<T extends Doc>(
  options:
    | (FindOptions<T> & {
        domain?: Domain | undefined // Allow to find for Doc's in specified domain only.
      })
    | undefined,
  _class: Ref<Class<T>>
): Projection<T> | undefined {
  const requested = options?.projection
  if (requested === undefined) {
    return undefined
  }
  const result: Projection<T> = {}
  let hasEntries = false
  for (const key in requested) {
    // Remap mixin attribute keys to their storage representation.
    const mappedKey = this.checkMixinKey<T>(key, _class) as keyof T
    result[mappedKey] = requested[key]
    hasEntries = true
  }
  // An empty projection object would still trigger a useless cursor.project() call.
  return hasEntries ? result : undefined
}
|
||||
|
||||
stripHash<T extends Doc>(docs: T[]): T[] {
|
||||
docs.forEach((it) => {
|
||||
if ('%hash%' in it) {
|
||||
@ -735,7 +818,7 @@ abstract class MongoAdapterBase implements DbAdapter {
|
||||
return []
|
||||
}
|
||||
const cursor = this.db.collection<Doc>(domain).find<Doc>({ _id: { $in: docs } })
|
||||
const result = await this.toArray(cursor)
|
||||
const result = await this.toArray(new MeasureMetricsContext('', {}), cursor, docs.length)
|
||||
return this.stripHash(this.stripHash(result))
|
||||
}
|
||||
|
||||
@ -830,7 +913,7 @@ class MongoAdapter extends MongoAdapterBase {
|
||||
console.error('Unknown/Unsupported operation:', tx._class, tx)
|
||||
}
|
||||
|
||||
async tx (...txes: Tx[]): Promise<TxResult[]> {
|
||||
async tx (ctx: MeasureContext, ...txes: Tx[]): Promise<TxResult[]> {
|
||||
const result: TxResult[] = []
|
||||
|
||||
const bulkOperations: DomainOperation[] = []
|
||||
@ -860,7 +943,14 @@ class MongoAdapter extends MongoAdapterBase {
|
||||
}
|
||||
if (dop.bulk === undefined) {
|
||||
// Execute previous bulk and capture result.
|
||||
await bulkExecute()
|
||||
await ctx.with(
|
||||
'bulkExecute',
|
||||
{},
|
||||
async () => {
|
||||
await bulkExecute()
|
||||
},
|
||||
{ txes: cutObjectArray(tx) }
|
||||
)
|
||||
try {
|
||||
result.push(await dop.raw())
|
||||
} catch (err: any) {
|
||||
@ -871,9 +961,23 @@ class MongoAdapter extends MongoAdapterBase {
|
||||
if (lastDomain === undefined) {
|
||||
lastDomain = dop.domain
|
||||
}
|
||||
if (lastDomain !== dop.domain) {
|
||||
// If we have domain switch, let's execute previous bulk and start new one.
|
||||
await ctx.with(
|
||||
'bulkExecute',
|
||||
{},
|
||||
async () => {
|
||||
await bulkExecute()
|
||||
},
|
||||
{ operations: cutObjectArray(bulkOperations) }
|
||||
)
|
||||
lastDomain = dop.domain
|
||||
}
|
||||
bulkOperations.push(dop)
|
||||
}
|
||||
await bulkExecute()
|
||||
await ctx.with('bulkExecute', {}, async () => {
|
||||
await bulkExecute()
|
||||
})
|
||||
} else {
|
||||
const r = await this.getOperations(txes[0])?.raw()
|
||||
if (r !== undefined) {
|
||||
@ -1160,11 +1264,11 @@ class MongoAdapter extends MongoAdapterBase {
|
||||
class MongoTxAdapter extends MongoAdapterBase implements TxAdapter {
|
||||
txColl: Collection | undefined
|
||||
|
||||
override async tx (...tx: Tx[]): Promise<TxResult[]> {
|
||||
override async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
|
||||
if (tx.length === 0) {
|
||||
return []
|
||||
}
|
||||
await this.txCollection().insertMany(tx.map((it) => translateDoc(it)))
|
||||
await ctx.with('insertMany', {}, async () => await this.txCollection().insertMany(tx.map((it) => translateDoc(it))))
|
||||
return []
|
||||
}
|
||||
|
||||
@ -1181,7 +1285,7 @@ class MongoTxAdapter extends MongoAdapterBase implements TxAdapter {
|
||||
.collection(DOMAIN_TX)
|
||||
.find<Tx>({ objectSpace: core.space.Model })
|
||||
.sort({ _id: 1, modifiedOn: 1 })
|
||||
const model = await this.toArray(cursor)
|
||||
const model = await this.toArray(new MeasureMetricsContext('', {}), cursor)
|
||||
// We need to put all core.account.System transactions first
|
||||
const systemTx: Tx[] = []
|
||||
const userTx: Tx[] = []
|
||||
|
@ -42,7 +42,10 @@ export async function getMongoClient (uri: string, options?: MongoClientOptions)
|
||||
const client = await MongoClient.connect(uri, {
|
||||
...options,
|
||||
enableUtf8Validation: false,
|
||||
maxConnecting: 1024
|
||||
maxConnecting: 1024,
|
||||
minPoolSize: 128,
|
||||
maxPoolSize: 512,
|
||||
zlibCompressionLevel: 0
|
||||
})
|
||||
connections.push(client)
|
||||
return client
|
||||
|
@ -24,6 +24,7 @@ import core, {
|
||||
FindResult,
|
||||
Hierarchy,
|
||||
IndexingConfiguration,
|
||||
MeasureContext,
|
||||
ModelDb,
|
||||
Ref,
|
||||
Space,
|
||||
@ -42,6 +43,7 @@ class MinioBlobAdapter implements DbAdapter {
|
||||
) {}
|
||||
|
||||
async findAll<T extends Doc>(
|
||||
ctx: MeasureContext,
|
||||
_class: Ref<Class<T>>,
|
||||
query: DocumentQuery<T>,
|
||||
options?: FindOptions<T>
|
||||
@ -49,7 +51,7 @@ class MinioBlobAdapter implements DbAdapter {
|
||||
return Object.assign([], { total: 0 })
|
||||
}
|
||||
|
||||
async tx (...tx: Tx[]): Promise<TxResult[]> {
|
||||
async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
|
||||
return []
|
||||
}
|
||||
|
||||
|
@ -22,11 +22,11 @@ import {
|
||||
IndexStageState,
|
||||
MeasureContext,
|
||||
Ref,
|
||||
Storage,
|
||||
WorkspaceId
|
||||
} from '@hcengineering/core'
|
||||
import {
|
||||
contentStageId,
|
||||
DbAdapter,
|
||||
docKey,
|
||||
DocUpdateHandler,
|
||||
docUpdKey,
|
||||
@ -65,10 +65,10 @@ export class LibRetranslateStage implements TranslationStage {
|
||||
|
||||
constructor (readonly workspaceId: WorkspaceId) {}
|
||||
|
||||
async initialize (storage: Storage, pipeline: FullTextPipeline): Promise<void> {
|
||||
async initialize (ctx: MeasureContext, storage: DbAdapter, pipeline: FullTextPipeline): Promise<void> {
|
||||
// Just do nothing
|
||||
try {
|
||||
const config = await storage.findAll(translatePlugin.class.TranslateConfiguration, {})
|
||||
const config = await storage.findAll(ctx, translatePlugin.class.TranslateConfiguration, {})
|
||||
if (config.length > 0) {
|
||||
this.enabled = config[0].enabled
|
||||
this.token = config[0].token
|
||||
@ -81,10 +81,17 @@ export class LibRetranslateStage implements TranslationStage {
|
||||
this.enabled = false
|
||||
}
|
||||
|
||||
;[this.stageValue, this.indexState] = await loadIndexStageStage(storage, this.indexState, this.stageId, 'config', {
|
||||
enabled: this.enabled,
|
||||
endpoint: this.endpoint
|
||||
})
|
||||
;[this.stageValue, this.indexState] = await loadIndexStageStage(
|
||||
ctx,
|
||||
storage,
|
||||
this.indexState,
|
||||
this.stageId,
|
||||
'config',
|
||||
{
|
||||
enabled: this.enabled,
|
||||
endpoint: this.endpoint
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async search (
|
||||
|
@ -34,7 +34,7 @@
|
||||
"prettier": "^3.1.0",
|
||||
"prettier-plugin-svelte": "^3.1.0",
|
||||
"typescript": "^5.3.3",
|
||||
"@playwright/test": "^1.40.1",
|
||||
"@playwright/test": "^1.41.2",
|
||||
"allure-playwright": "^2.9.2"
|
||||
},
|
||||
"dependencies": {
|
||||
|
@ -79,25 +79,42 @@ export class VacanciesPage extends CommonRecruitingPage {
|
||||
}
|
||||
}
|
||||
|
||||
async exportVacanciesWithCheck (textToCheck: string): Promise<void> {
|
||||
const downloadPromise = this.page.waitForEvent('download')
|
||||
await this.buttonExport.click()
|
||||
const download = await downloadPromise
|
||||
async exportVacanciesWithCheck (textToCheck: string, timeout: number): Promise<void> {
|
||||
let expired = 2
|
||||
while (true) {
|
||||
let shouldExit = false
|
||||
const downloadPromise = this.page.waitForEvent('download')
|
||||
await this.buttonExport.click()
|
||||
const download = await downloadPromise
|
||||
const readable = await download.createReadStream()
|
||||
await new Promise<void>((resolve) => {
|
||||
const chunks: string[] = []
|
||||
|
||||
const chunks: string[] = []
|
||||
const readable = await download.createReadStream()
|
||||
readable.on('readable', () => {
|
||||
let chunk
|
||||
while ((chunk = readable.read()) !== null) {
|
||||
chunks.push(chunk)
|
||||
}
|
||||
})
|
||||
|
||||
readable.on('readable', () => {
|
||||
let chunk
|
||||
while ((chunk = readable.read()) !== null) {
|
||||
chunks.push(chunk)
|
||||
readable.on('end', () => {
|
||||
const content = chunks.join('')
|
||||
if (content.includes(textToCheck)) {
|
||||
shouldExit = true
|
||||
} else {
|
||||
expired--
|
||||
}
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
if (shouldExit) {
|
||||
return
|
||||
}
|
||||
})
|
||||
|
||||
readable.on('end', () => {
|
||||
const content = chunks.join('')
|
||||
expect(content).toContain(textToCheck)
|
||||
})
|
||||
await new Promise((resolve) => setTimeout(resolve, timeout / 2))
|
||||
if (expired === 0) {
|
||||
expect('').toContain(textToCheck)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async showArchivedVacancy (): Promise<void> {
|
||||
|
Loading…
Reference in New Issue
Block a user