Merge remote-tracking branch 'origin/develop'

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
Andrey Sobolev 2024-08-02 22:39:01 +07:00
commit c8c6a8f02d
No known key found for this signature in database
GPG Key ID: BD80F68D68D8F7F2
6 changed files with 116 additions and 20 deletions

View File

@@ -2,12 +2,14 @@ const exec = require('child_process').exec
exec('git describe --tags `git rev-list --tags --max-count=1`', (err, stdout, stderr) => {
if (err !== null) {
console.log('Error', err)
process.exit(1)
}
const tag = stdout.trim()
console.log('Check changes for tag:', tag)
exec(`git fetch --tags && git diff ${tag} --name-only`, (err, stdout, stderr) => {
if (err !== null) {
console.log('Error', err)
process.exit(1)
}
const changedFiles = stdout.trim().split('\n')

View File

@ -1,6 +1,6 @@
//
// Copyright © 2020, 2021 Anticrm Platform Contributors.
// Copyright © 2021 Hardcore Engineering Inc.
// Copyright © 2021, 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
@ -74,7 +74,7 @@ import { consoleModelLogger, type MigrateOperation } from '@hcengineering/model'
import contact from '@hcengineering/model-contact'
import { getMongoClient, getWorkspaceDB } from '@hcengineering/mongo'
import { openAIConfigDefaults } from '@hcengineering/openai'
import type { StorageAdapter } from '@hcengineering/server-core'
import type { StorageAdapter, StorageAdapterEx } from '@hcengineering/server-core'
import { deepEqual } from 'fast-equals'
import { createWriteStream, readFileSync } from 'fs'
import { benchmark, benchmarkWorker } from './benchmark'
@ -95,6 +95,7 @@ import { fixJsonMarkup } from './markup'
import { fixMixinForeignAttributes, showMixinForeignAttributes } from './mixin'
import { openAIConfig } from './openai'
import { fixAccountEmails, renameAccount } from './renameAccount'
import { moveFiles } from './storage'
const colorConstants = {
colorRed: '\u001b[31m',
@ -1040,6 +1041,37 @@ export function devTool (
})
})
// Register the `move-files` CLI command: migrates every workspace's blobs
// from secondary storage providers into the configured default provider
// (the actual copying is done by moveFiles from ./storage).
program
.command('move-files')
// An empty default value means "process all workspaces".
.option('-w, --workspace <workspace>', 'Selected workspace only', '')
.action(async (cmd: { workspace: string }) => {
const { mongodbUri } = prepareTools()
await withDatabase(mongodbUri, async (db, client) => {
await withStorage(mongodbUri, async (adapter) => {
try {
// Moving between providers only makes sense for an aggregator
// (multi-provider) storage configuration.
const exAdapter = adapter as StorageAdapterEx
if (exAdapter.adapters === undefined || exAdapter.adapters.size < 2) {
throw new Error('bad storage config, at least two storage providers are required')
}
console.log('moving files to storage provider', exAdapter.defaultAdapter)
const workspaces = await listWorkspacesPure(db, productId)
for (const workspace of workspaces) {
// When -w was given, skip every workspace except the selected one.
if (cmd.workspace !== '' && workspace.workspace !== cmd.workspace) {
continue
}
const wsId = getWorkspaceId(workspace.workspace, productId)
await moveFiles(toolCtx, wsId, exAdapter)
}
} catch (err: any) {
// Log and continue: a failure here should not abort the tool process.
console.error(err)
}
})
})
})
program.command('fix-bw-workspace <workspace>').action(async (workspace: string) => {
const { mongodbUri } = prepareTools()
await withStorage(mongodbUri, async (adapter) => {

60
dev/tool/src/storage.ts Normal file
View File

@@ -0,0 +1,60 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import { type MeasureContext, type WorkspaceId } from '@hcengineering/core'
import { type StorageAdapterEx } from '@hcengineering/server-core'
import { PassThrough } from 'stream'
/**
 * Moves all blobs of a workspace out of every non-default storage provider
 * into the aggregator's default provider.
 *
 * Each blob is re-uploaded through `exAdapter.put`, which stores the data in
 * the default provider and updates the blob's bookkeeping record.
 *
 * @param ctx - measure/logging context passed through to the adapters
 * @param workspaceId - workspace whose blobs are migrated
 * @param exAdapter - aggregator storage adapter holding per-provider adapters
 */
export async function moveFiles (
  ctx: MeasureContext,
  workspaceId: WorkspaceId,
  exAdapter: StorageAdapterEx
): Promise<void> {
  // Nothing to do for a single-provider (non-aggregator) configuration.
  if (exAdapter.adapters === undefined) return

  let count = 0

  console.log('start', workspaceId.name)

  // We assume that the adapter moves all new files to the default adapter
  const target = exAdapter.defaultAdapter

  // Make sure the target bucket/container exists before copying into it.
  await exAdapter.adapters.get(target)?.make(ctx, workspaceId)

  for (const [name, adapter] of exAdapter.adapters.entries()) {
    if (name === target) continue

    const iterator = await adapter.listStream(ctx, workspaceId)
    try {
      while (true) {
        const data = await iterator.next()
        if (data === undefined) break

        const blob = await exAdapter.stat(ctx, workspaceId, data._id)
        // Skip entries without a blob record or already on the target provider.
        if (blob === undefined) continue
        if (blob.provider === target) continue

        const readable = await exAdapter.get(ctx, workspaceId, data._id)
        const stream = readable.pipe(new PassThrough())
        await exAdapter.put(ctx, workspaceId, data._id, stream, blob.contentType, blob.size)

        count += 1
        if (count % 100 === 0) {
          console.log('...moved: ', count)
        }
      }
    } finally {
      // Fix: previously the cursor leaked when stat/get/put threw mid-loop;
      // always release the listing iterator, even on failure.
      await iterator.close()
    }
  }

  console.log('...done', workspaceId.name, count)
}

View File

@ -13,9 +13,8 @@
// limitations under the License.
//
import { getEmbeddedLabel, IntlString, PlatformError, unknownError } from '@hcengineering/platform'
import { getEmbeddedLabel, IntlString } from '@hcengineering/platform'
import { deepEqual } from 'fast-equals'
import { DOMAIN_BENCHMARK } from './benchmark'
import {
Account,
AccountRole,
@ -47,6 +46,7 @@ import { TxOperations } from './operations'
import { isPredicate } from './predicate'
import { DocumentQuery, FindResult } from './storage'
import { DOMAIN_TX } from './tx'
import { DOMAIN_BENCHMARK } from './benchmark'
function toHex (value: number, chars: number): string {
const result = value.toString(16)
@ -355,6 +355,7 @@ export class DocManager<T extends Doc> implements IDocManager<T> {
export class RateLimiter {
idCounter: number = 0
processingQueue = new Map<number, Promise<void>>()
last: number = 0
rate: number
@ -365,21 +366,21 @@ export class RateLimiter {
}
notify: (() => void)[] = []
finished: boolean = false
async exec<T, B extends Record<string, any> = any>(op: (args?: B) => Promise<T>, args?: B): Promise<T> {
if (this.finished) {
throw new PlatformError(unknownError('No Possible to add/exec on finished queue'))
}
while (this.notify.length >= this.rate) {
const processingId = this.idCounter++
while (this.processingQueue.size >= this.rate) {
await new Promise<void>((resolve) => {
this.notify.push(resolve)
})
}
try {
const p = op(args)
this.processingQueue.set(processingId, p as Promise<void>)
return await p
} finally {
this.processingQueue.delete(processingId)
const n = this.notify.shift()
if (n !== undefined) {
n()
@ -388,7 +389,7 @@ export class RateLimiter {
}
async add<T, B extends Record<string, any> = any>(op: (args?: B) => Promise<T>, args?: B): Promise<void> {
if (this.notify.length < this.rate) {
if (this.processingQueue.size < this.rate) {
void this.exec(op, args)
} else {
await this.exec(op, args)
@ -396,12 +397,7 @@ export class RateLimiter {
}
async waitProcessing (): Promise<void> {
this.finished = true
while (this.notify.length > 0) {
await new Promise<void>((resolve) => {
this.notify.push(resolve)
})
}
await Promise.all(this.processingQueue.values())
}
}

View File

@ -24,7 +24,7 @@ describe('aggregator tests', () => {
const ws1: WorkspaceId = { name: 'ws1', productId: '' }
return { mem1, mem2, aggr, ws1, testCtx }
}
it('reuse existing storage', async () => {
it('not reuse existing storage', async () => {
const { mem1, aggr, ws1, testCtx } = prepare1()
// Test default provider
@ -37,7 +37,7 @@ describe('aggregator tests', () => {
// Test content typed provider
await aggr.put(testCtx, ws1, 'test', 'data2', 'text/plain')
const stat2 = await aggr.stat(testCtx, ws1, 'test')
expect(stat2?.provider).toEqual('mem1')
expect(stat2?.provider).toEqual('mem2')
const dta = Buffer.concat(await aggr.read(testCtx, ws1, 'test')).toString()
expect(dta).toEqual('data2')

View File

@@ -317,12 +317,11 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
contentType: string,
size?: number | undefined
): Promise<UploadedObjectInfo> {
// We need to reuse same provider for existing documents.
const stat = (
await this.dbAdapter.find<Blob>(ctx, workspaceId, DOMAIN_BLOB, { _id: objectName as Ref<Blob> }, { limit: 1 })
).shift()
const { provider, adapter } = this.selectProvider(stat?.provider, contentType)
const { provider, adapter } = this.selectProvider(undefined, contentType)
const result = await adapter.put(ctx, workspaceId, objectName, stream, contentType, size)
@ -351,6 +350,13 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
}
await this.dbAdapter.upload<Blob>(ctx, workspaceId, DOMAIN_BLOB, [blobDoc])
// If the file is already stored in different provider, we need to remove it.
if (stat !== undefined && stat.provider !== provider) {
const adapter = this.adapters.get(stat.provider)
await adapter?.remove(ctx, workspaceId, [stat._id])
}
return result
}
}