UBERF-9429: Provide workspace ids to storage adapters (#7956)
Signed-off-by: Alexey Zinoviev <alexey.zinoviev@xored.com>
This commit is contained in:
parent: ddc8230890
commit: e8df531654
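The heart of the change: every storage adapter method that used to receive a bare WorkspaceDataId now receives the full WorkspaceIds record, and call sites that still need the legacy data id derive it via a fallback on the uuid. A minimal sketch of the shape being threaded through, reconstructed from the diff below; the exact declarations in @hcengineering/core may differ, and the branded string types here are an assumption:

// Sketch only: the real types live in @hcengineering/core.
type WorkspaceUuid = string & { __workspaceUuid: true } // assumed branding
type WorkspaceDataId = string & { __workspaceDataId: true } // assumed branding

interface WorkspaceIds {
  uuid: WorkspaceUuid
  dataId?: WorkspaceDataId // legacy storage id; absent for newer workspaces
  url: string
}

// The fallback rule used throughout this commit (extracted as getDataId below):
function getDataId (wsIds: WorkspaceIds): WorkspaceDataId {
  return wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)
}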
@@ -20,7 +20,6 @@ import core, {
   MeasureContext,
   Ref,
   TxOperations,
-  type WorkspaceDataId,
   generateId,
   makeDocCollabId,
   systemAccountUuid,
@@ -273,14 +272,12 @@ export async function processImages (

   // upload
   const uuid = generateId()
-  await storageAdapter.put(
-    ctx,
-    workspaceDataId ?? (workspaceId as unknown as WorkspaceDataId),
-    uuid,
-    fileContents,
-    mimeType,
-    fileSize
-  )
+  const wsIds = {
+    uuid: workspaceId,
+    dataId: workspaceDataId,
+    url: ''
+  }
+  await storageAdapter.put(ctx, wsIds, uuid, fileContents, mimeType, fileSize)

   // attachment
   const attachmentId: Ref<Attachment> = generateId()
@@ -54,6 +54,7 @@ import core, {
   type TxUpdateDoc,
   type WorkspaceUuid,
   type WorkspaceDataId,
+  type WorkspaceIds,
   generateId,
   getObjectValue,
   toIdMap,
@@ -172,12 +173,12 @@ export async function cleanWorkspace (

 export async function fixMinioBW (
   ctx: MeasureContext,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   storageService: StorageAdapter
 ): Promise<void> {
-  console.log('try clean bw miniature for ', workspaceId)
+  console.log('try clean bw miniature for ', wsIds)
   const from = new Date(new Date().setDate(new Date().getDate() - 7)).getTime()
-  const list = await storageService.listStream(ctx, workspaceId)
+  const list = await storageService.listStream(ctx, wsIds)
   let removed = 0
   while (true) {
     const objs = await list.next()
@@ -187,7 +188,7 @@ export async function fixMinioBW (
     for (const obj of objs) {
       if (obj.modifiedOn < from) continue
       if ((obj._id as string).includes('%preview%')) {
-        await storageService.remove(ctx, workspaceId, [obj._id])
+        await storageService.remove(ctx, wsIds, [obj._id])
         removed++
         if (removed % 100 === 0) {
           console.log('removed: ', removed)
@@ -47,6 +47,7 @@ import {
   MeasureMetricsContext,
   metricsToString,
   type PersonId,
+  type WorkspaceUuid,
   type Data,
   type Tx,
   type Version,
@@ -1136,13 +1137,9 @@ export function devTool (
     .action(async (bucketName: string, dirName: string, storeIn: string, cmd) => {
       const backupStorageConfig = storageConfigFromEnv(process.env.STORAGE)
       const storageAdapter = createStorageFromConfig(backupStorageConfig.storages[0])
+      const backupIds = { uuid: bucketName as WorkspaceUuid, dataId: bucketName as WorkspaceDataId, url: '' }
       try {
-        const storage = await createStorageBackupStorage(
-          toolCtx,
-          storageAdapter,
-          bucketName as WorkspaceDataId,
-          dirName
-        )
+        const storage = await createStorageBackupStorage(toolCtx, storageAdapter, backupIds, dirName)
         await backupDownload(storage, storeIn)
       } catch (err: any) {
         toolCtx.error('failed to size backup', { err })
@@ -28,9 +28,9 @@ import core, {
   type Ref,
   type TxCreateDoc,
   type TxUpdateDoc,
-  type WorkspaceDataId,
   DOMAIN_TX,
   SortingOrder,
+  type WorkspaceIds,
   makeCollabId,
   makeCollabYdocId,
   makeDocCollabId
@@ -49,7 +49,7 @@ export interface RestoreWikiContentParams {
 export async function restoreWikiContentMongo (
   ctx: MeasureContext,
   db: Db,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   storageAdapter: StorageAdapter,
   params: RestoreWikiContentParams
 ): Promise<void> {
@@ -74,17 +74,17 @@ export async function restoreWikiContentMongo (

       const correctCollabId = { objectClass: doc._class, objectId: doc._id, objectAttr: 'content' }

-      const wrongYdocId = await findWikiDocYdocName(ctx, db, workspaceId, doc._id)
+      const wrongYdocId = await findWikiDocYdocName(ctx, db, doc._id)
       if (wrongYdocId === undefined) {
         console.log('current ydoc not found', doc._id)
         continue
       }

-      const stat = storageAdapter.stat(ctx, workspaceId, wrongYdocId)
+      const stat = storageAdapter.stat(ctx, wsIds, wrongYdocId)
       if (stat === undefined) continue

-      const ydoc1 = await loadCollabYdoc(ctx, storageAdapter, workspaceId, correctCollabId)
-      const ydoc2 = await loadCollabYdoc(ctx, storageAdapter, workspaceId, wrongYdocId)
+      const ydoc1 = await loadCollabYdoc(ctx, storageAdapter, wsIds, correctCollabId)
+      const ydoc2 = await loadCollabYdoc(ctx, storageAdapter, wsIds, wrongYdocId)

       if (ydoc1 !== undefined && ydoc1.share.has('content')) {
         // There already is content, we should skip the document
@@ -103,7 +103,7 @@ export async function restoreWikiContentMongo (
           yDocCopyXmlField(ydoc2, 'description', 'content')
         }

-        await saveCollabYdoc(ctx, storageAdapter, workspaceId, correctCollabId, ydoc2)
+        await saveCollabYdoc(ctx, storageAdapter, wsIds, correctCollabId, ydoc2)
       }
       restoredCnt++
     } catch (err: any) {
@@ -119,7 +119,6 @@ export async function restoreWikiContentMongo (
 export async function findWikiDocYdocName (
   ctx: MeasureContext,
   db: Db,
-  workspaceId: WorkspaceDataId,
   doc: Ref<Document>
 ): Promise<Ref<Blob> | undefined> {
   const updateContentTx = await db.collection<TxUpdateDoc<Document & { content: string }>>(DOMAIN_TX).findOne(
@@ -198,7 +197,7 @@ export interface RestoreControlledDocContentParams {
 export async function restoreControlledDocContentMongo (
   ctx: MeasureContext,
   db: Db,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   storageAdapter: StorageAdapter,
   params: RestoreWikiContentParams
 ): Promise<void> {
@@ -220,15 +219,7 @@ export async function restoreControlledDocContentMongo (
     const doc = await iterator.next()
     if (doc === null) break

-    const restored = await restoreControlledDocContentForDoc(
-      ctx,
-      db,
-      workspaceId,
-      storageAdapter,
-      params,
-      doc,
-      'content'
-    )
+    const restored = await restoreControlledDocContentForDoc(ctx, db, wsIds, storageAdapter, params, doc, 'content')
     if (restored) {
       restoredCnt++
     }
@@ -247,7 +238,7 @@ export async function restoreControlledDocContentMongo (
 export async function restoreControlledDocContentForDoc (
   ctx: MeasureContext,
   db: Db,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   storageAdapter: StorageAdapter,
   params: RestoreWikiContentParams,
   doc: Doc,
@@ -272,7 +263,7 @@ export async function restoreControlledDocContentForDoc (
   const ydocId = makeCollabYdocId(makeDocCollabId(doc, attribute))

   // Ensure that we don't have new content in storage
-  const stat = await storageAdapter.stat(ctx, workspaceId, ydocId)
+  const stat = await storageAdapter.stat(ctx, wsIds, ydocId)
   if (stat !== undefined) {
     console.log('content already restored', doc._class, doc._id, ydocId)
     return false
@@ -281,15 +272,15 @@
   console.log('restoring content', doc._id, currentYdocId, '-->', ydocId)
   if (!params.dryRun) {
     try {
-      const stat = await storageAdapter.stat(ctx, workspaceId, currentYdocId)
+      const stat = await storageAdapter.stat(ctx, wsIds, currentYdocId)
       if (stat === undefined) {
         console.log('no content to restore', doc._class, doc._id, ydocId)
         return false
       }

-      const data = await storageAdapter.read(ctx, workspaceId, currentYdocId)
+      const data = await storageAdapter.read(ctx, wsIds, currentYdocId)
       const buffer = Buffer.concat(data as any)
-      await storageAdapter.put(ctx, workspaceId, ydocId, buffer, 'application/ydoc', buffer.length)
+      await storageAdapter.put(ctx, wsIds, ydocId, buffer, 'application/ydoc', buffer.length)
     } catch (err: any) {
       console.error('failed to restore content for', doc._class, doc._id, err)
       return false
@@ -302,7 +293,7 @@ export async function restoreControlledDocContentForDoc (
 export async function restoreMarkupRefsMongo (
   ctx: MeasureContext,
   db: Db,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   hierarchy: Hierarchy,
   storageAdapter: StorageAdapter
 ): Promise<void> {
@@ -347,10 +338,10 @@ export async function restoreMarkupRefsMongo (
       const ydocId = makeCollabYdocId(collabId)

       try {
-        const buffer = await storageAdapter.read(ctx, workspaceId, ydocId)
+        const buffer = await storageAdapter.read(ctx, wsIds, ydocId)
         const ydoc = yDocFromBuffer(Buffer.concat(buffer as any))

-        const jsonId = await saveCollabJson(ctx, storageAdapter, workspaceId, collabId, ydoc)
+        const jsonId = await saveCollabJson(ctx, storageAdapter, wsIds, collabId, ydoc)
         await collection.updateOne({ _id: doc._id }, { $set: { [attributeName]: jsonId } })
       } catch {}
     }
@@ -20,7 +20,8 @@ import {
   type Ref,
   concatLink,
   RateLimiter,
-  type WorkspaceDataId
+  type WorkspaceIds,
+  type WorkspaceUuid
 } from '@hcengineering/core'
 import { type DatalakeClient } from '@hcengineering/datalake'
 import { type UploadObjectParams } from '@hcengineering/datalake/types/client'
@@ -42,7 +43,7 @@ export interface MoveFilesParams {

 export async function moveFiles (
   ctx: MeasureContext,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   exAdapter: StorageAdapterEx,
   params: MoveFilesParams
 ): Promise<void> {
@@ -52,7 +53,7 @@ export async function moveFiles (
   if (target === undefined) return

   // We assume that the adapter moves all new files to the default adapter
-  await target.make(ctx, workspaceId)
+  await target.make(ctx, wsIds)

   for (const { name, adapter } of exAdapter.adapters.slice(1).reverse()) {
     console.log('moving from', name, 'limit', 'concurrency', params.concurrency)
@@ -60,14 +61,14 @@
     // we attempt retry the whole process in case of failure
     // files that were already moved will be skipped
     await retryOnFailure(ctx, 5, async () => {
-      await processAdapter(ctx, exAdapter, adapter, target, workspaceId, params)
+      await processAdapter(ctx, exAdapter, adapter, target, wsIds, params)
     })
   }
 }

 export async function showLostFiles (
   ctx: MeasureContext,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   db: Db,
   storageAdapter: StorageAdapter,
   { showAll }: { showAll: boolean }
@@ -81,7 +82,7 @@ export async function showLostFiles (
     const { _id, _class, file, name, modifiedOn } = attachment
     const date = new Date(modifiedOn).toISOString()

-    const stat = await storageAdapter.stat(ctx, workspaceId, file)
+    const stat = await storageAdapter.stat(ctx, wsIds, file)
     if (stat === undefined) {
       console.warn('-', date, _class, _id, file, name)
     } else if (showAll) {
@@ -95,7 +96,7 @@ async function processAdapter (
   exAdapter: StorageAdapterEx,
   source: StorageAdapter,
   target: StorageAdapter,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   params: MoveFilesParams
 ): Promise<void> {
   if (source === target) {
@@ -128,9 +129,9 @@ async function processAdapter (

   const rateLimiter = new RateLimiter(params.concurrency)

-  const iterator = await source.listStream(ctx, workspaceId)
+  const iterator = await source.listStream(ctx, wsIds)

-  const targetIterator = await target.listStream(ctx, workspaceId)
+  const targetIterator = await target.listStream(ctx, wsIds)

   const targetBlobs = new Map<Ref<Blob>, ListBlobResult>()

@@ -165,7 +166,7 @@ async function processAdapter (
       }

       if (targetBlob === undefined) {
-        const sourceBlob = await source.stat(ctx, workspaceId, data._id)
+        const sourceBlob = await source.stat(ctx, wsIds, data._id)

         if (sourceBlob === undefined) {
           console.error('blob not found', data._id)
@@ -177,7 +178,7 @@ async function processAdapter (
           ctx,
           5,
           async () => {
-            return await processFile(ctx, source, target, workspaceId, sourceBlob)
+            return await processFile(ctx, source, target, wsIds, sourceBlob)
           },
           50
         )
@@ -209,7 +210,7 @@ async function processAdapter (
   if (toRemove.length > 0 && params.move) {
     while (toRemove.length > 0) {
       const part = toRemove.splice(0, 500)
-      await source.remove(ctx, workspaceId, part)
+      await source.remove(ctx, wsIds, part)
     }
   }
   printStats()
@@ -222,16 +223,16 @@ async function processFile (
   ctx: MeasureContext,
   source: Pick<StorageAdapter, 'get'>,
   target: Pick<StorageAdapter, 'put'>,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   blob: Blob
 ): Promise<UploadedObjectInfo> {
-  const readable = await source.get(ctx, workspaceId, blob._id)
+  const readable = await source.get(ctx, wsIds, blob._id)
   try {
     readable.on('end', () => {
       readable.destroy()
     })
     const stream = readable.pipe(new PassThrough())
-    return await target.put(ctx, workspaceId, blob._id, stream, blob.contentType, blob.size)
+    return await target.put(ctx, wsIds, blob._id, stream, blob.contentType, blob.size)
   } finally {
     readable.destroy()
   }
@@ -267,7 +268,7 @@ export interface CopyDatalakeParams {

 export async function copyToDatalake (
   ctx: MeasureContext,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   config: S3Config,
   adapter: S3Service,
   datalake: DatalakeClient,
@@ -275,7 +276,7 @@ export async function copyToDatalake (
 ): Promise<void> {
   console.log('copying from', config.name, 'concurrency:', params.concurrency)

-  const exists = await adapter.exists(ctx, workspaceId)
+  const exists = await adapter.exists(ctx, wsIds)
   if (!exists) {
     console.log('no files to copy')
     return
@@ -311,7 +312,7 @@ export async function copyToDatalake (
   let cursor: string | undefined = ''
   let hasMore = true
   while (hasMore) {
-    const res = await datalake.listObjects(ctx, workspaceId, cursor, 1000)
+    const res = await datalake.listObjects(ctx, wsIds.uuid, cursor, 1000)
     cursor = res.cursor
     hasMore = res.cursor !== undefined
     for (const blob of res.blobs) {
@@ -323,7 +324,7 @@ export async function copyToDatalake (

   const rateLimiter = new RateLimiter(params.concurrency)

-  const iterator = await adapter.listStream(ctx, workspaceId)
+  const iterator = await adapter.listStream(ctx, wsIds)

   try {
     while (true) {
@@ -349,7 +350,7 @@ export async function copyToDatalake (
           ctx,
           5,
           async () => {
-            await copyBlobToDatalake(ctx, workspaceId, blob, config, adapter, datalake)
+            await copyBlobToDatalake(ctx, wsIds, blob, config, adapter, datalake)
             processedCnt += 1
             processedSize += blob.size
           },
@@ -374,7 +375,7 @@ export async function copyToDatalake (

 export async function copyBlobToDatalake (
   ctx: MeasureContext,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   blob: ListBlobResult,
   config: S3Config,
   adapter: S3Service,
@@ -385,15 +386,15 @@ export async function copyBlobToDatalake (
     // Handle small file
     const { endpoint, accessKey: accessKeyId, secretKey: secretAccessKey, region } = config

-    const bucketId = adapter.getBucketId(workspaceId)
-    const objectId = adapter.getDocumentKey(workspaceId, encodeURIComponent(objectName))
+    const bucketId = adapter.getBucketId(wsIds)
+    const objectId = adapter.getDocumentKey(wsIds, encodeURIComponent(objectName))
     const url = concatLink(endpoint, `${bucketId}/${objectId}`)

     const params = { url, accessKeyId, secretAccessKey, region }
-    await datalake.uploadFromS3(ctx, workspaceId, objectName, params)
+    await datalake.uploadFromS3(ctx, wsIds.uuid, objectName, params)
   } else {
     // Handle huge file
-    const stat = await adapter.stat(ctx, workspaceId, objectName)
+    const stat = await adapter.stat(ctx, wsIds, objectName)
     if (stat !== undefined) {
       const metadata = {
         lastModified: stat.modifiedOn,
@@ -402,10 +403,10 @@ export async function copyBlobToDatalake (
         size: stat.size
       }

-      const readable = await adapter.get(ctx, workspaceId, objectName)
+      const readable = await adapter.get(ctx, wsIds, objectName)
       try {
         console.log('uploading huge blob', objectName, Math.round(stat.size / 1024 / 1024), 'MB')
-        await uploadMultipart(ctx, datalake, workspaceId, objectName, readable, metadata)
+        await uploadMultipart(ctx, datalake, wsIds.uuid, objectName, readable, metadata)
         console.log('done', objectName)
       } finally {
         readable.destroy()
@@ -417,7 +418,7 @@ export async function copyBlobToDatalake (
 function uploadMultipart (
   ctx: MeasureContext,
   datalake: DatalakeClient,
-  workspaceId: WorkspaceDataId,
+  workspaceId: WorkspaceUuid,
   objectName: string,
   stream: Readable,
   metadata: UploadObjectParams
@@ -14,7 +14,7 @@
 // limitations under the License.
 //

-import { DOMAIN_TX, type WorkspaceDataId, type MeasureContext, type Ref } from '@hcengineering/core'
+import { DOMAIN_TX, type WorkspaceDataId, type MeasureContext, type Ref, type WorkspaceIds } from '@hcengineering/core'
 import { DOMAIN_ATTACHMENT } from '@hcengineering/model-attachment'
 import contact, { DOMAIN_CHANNEL } from '@hcengineering/model-contact'
 import { DOMAIN_TELEGRAM } from '@hcengineering/model-telegram'
@@ -31,14 +31,15 @@ const LastMessages = 'last-msgs'
 export async function clearTelegramHistory (
   ctx: MeasureContext,
   mongoUrl: string,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   tgDb: string,
   storageAdapter: StorageAdapter
 ): Promise<void> {
   const client = getMongoClient(mongoUrl)
   try {
     const _client = await client.getClient()
-    const workspaceDB = getWorkspaceMongoDB(_client, workspaceId)
+    const dataId = wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)
+    const workspaceDB = getWorkspaceMongoDB(_client, dataId)
     const telegramDB = _client.db(tgDb)

     const sharedMessages = await workspaceDB
@@ -91,12 +92,12 @@ export async function clearTelegramHistory (
       workspaceDB.collection(DOMAIN_ATTACHMENT).deleteMany({
         attachedToClass: telegram.class.Message
       }),
-      storageAdapter.remove(ctx, workspaceId, Array.from(attachments))
+      storageAdapter.remove(ctx, wsIds, Array.from(attachments))
     ])

     console.log('clearing telegram service data...')
     await telegramDB.collection(LastMessages).deleteMany({
-      workspace: workspaceId
+      workspace: wsIds.uuid
     })
   } finally {
     client.close()
@@ -34,8 +34,7 @@ import {
   type Ref,
   SortingOrder,
   toIdMap,
-  TxOperations,
-  type WorkspaceDataId
+  TxOperations
 } from '@hcengineering/core'
 import {
   createDefaultSpace,
@@ -299,8 +298,7 @@ async function migrateDocSections (client: MigrationClient): Promise<void> {
     // Migrate sections headers + content
     try {
       const collabId = makeDocCollabId(document, 'content')
-      const dataId = client.wsIds.dataId ?? (client.wsIds.uuid as unknown as WorkspaceDataId)
-      const ydoc = await loadCollabYdoc(ctx, storage, dataId, collabId)
+      const ydoc = await loadCollabYdoc(ctx, storage, client.wsIds, collabId)
       if (ydoc === undefined) {
         // no content, ignore
         continue
@@ -346,7 +344,7 @@ async function migrateDocSections (client: MigrationClient): Promise<void> {
         }
       })

-      await saveCollabYdoc(ctx, storage, dataId, collabId, ydoc)
+      await saveCollabYdoc(ctx, storage, client.wsIds, collabId, ydoc)
     } catch (err) {
       ctx.error('error collaborative document content migration', { error: err, document: document.title })
     }
@@ -46,8 +46,7 @@ import core, {
   toIdMap,
   type TypedSpace,
   TxProcessor,
-  type SocialKey,
-  type WorkspaceDataId
+  type SocialKey
 } from '@hcengineering/core'
 import {
   createDefaultSpace,
@@ -233,14 +232,7 @@ async function processMigrateContentFor (
       if (value != null && value.startsWith('{')) {
         try {
           const buffer = Buffer.from(value)
-          await storageAdapter.put(
-            ctx,
-            client.wsIds.dataId ?? (client.wsIds.uuid as unknown as WorkspaceDataId),
-            blobId,
-            buffer,
-            'application/json',
-            buffer.length
-          )
+          await storageAdapter.put(ctx, client.wsIds, blobId, buffer, 'application/json', buffer.length)
         } catch (err) {
           ctx.error('failed to process document', { _class: doc._class, _id: doc._id, err })
         }
@@ -591,11 +583,10 @@ async function processMigrateJsonForDoc (
       : attribute.name

   const collabId = makeDocCollabId(doc, attribute.name)
-  const dataId = wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)
   if (value.startsWith('{')) {
     // For some reason we have documents that are already markups
     const jsonId = await retry(5, async () => {
-      return await saveCollabJson(ctx, storageAdapter, dataId, collabId, value)
+      return await saveCollabJson(ctx, storageAdapter, wsIds, collabId, value)
     })

     update[attributeName] = jsonId
@@ -617,17 +608,17 @@ async function processMigrateJsonForDoc (
     const ydocId = makeCollabYdocId(collabId)
     if (ydocId !== currentYdocId) {
       await retry(5, async () => {
-        const stat = await storageAdapter.stat(ctx, dataId, currentYdocId)
+        const stat = await storageAdapter.stat(ctx, wsIds, currentYdocId)
         if (stat !== undefined) {
-          const data = await storageAdapter.read(ctx, dataId, currentYdocId)
+          const data = await storageAdapter.read(ctx, wsIds, currentYdocId)
           const buffer = Buffer.concat(data as any)
-          await storageAdapter.put(ctx, dataId, ydocId, buffer, 'application/ydoc', buffer.length)
+          await storageAdapter.put(ctx, wsIds, ydocId, buffer, 'application/ydoc', buffer.length)
         }
       })
     }
   } catch (err) {
     const error = err instanceof Error ? err.message : String(err)
-    ctx.warn('failed to process collaborative doc', { dataId, collabId, currentYdocId, error })
+    ctx.warn('failed to process collaborative doc', { workspace: wsIds.uuid, collabId, currentYdocId, error })
   }

   const unset = update.$unset ?? {}
@@ -22,8 +22,7 @@ import {
   SortingOrder,
   type Class,
   type CollaborativeDoc,
-  type Doc,
-  type WorkspaceDataId
+  type Doc
 } from '@hcengineering/core'
 import { type Document, type DocumentSnapshot, type Teamspace } from '@hcengineering/document'
 import {
@@ -209,8 +208,7 @@ async function renameFieldsRevert (client: MigrationClient): Promise<void> {

     try {
       const collabId = makeDocCollabId(document, 'content')
-      const dataId = client.wsIds.dataId ?? (client.wsIds.uuid as unknown as WorkspaceDataId)
-      const ydoc = await loadCollabYdoc(ctx, storage, dataId, collabId)
+      const ydoc = await loadCollabYdoc(ctx, storage, client.wsIds, collabId)
       if (ydoc === undefined) {
         continue
       }
@@ -221,7 +219,7 @@ async function renameFieldsRevert (client: MigrationClient): Promise<void> {

       yDocCopyXmlField(ydoc, 'description', 'content')

-      await saveCollabYdoc(ctx, storage, dataId, collabId, ydoc)
+      await saveCollabYdoc(ctx, storage, client.wsIds, collabId, ydoc)
     } catch (err) {
       ctx.error('error document content migration', { error: err, document: document.title })
     }
@@ -257,8 +255,7 @@ async function restoreContentField (client: MigrationClient): Promise<void> {
   for (const document of documents) {
     try {
       const collabId = makeDocCollabId(document, 'content')
-      const dataId = client.wsIds.dataId ?? (client.wsIds.uuid as unknown as WorkspaceDataId)
-      const ydoc = await loadCollabYdoc(ctx, storage, dataId, collabId)
+      const ydoc = await loadCollabYdoc(ctx, storage, client.wsIds, collabId)
       if (ydoc === undefined) {
         ctx.error('document content not found', { document: document.title })
         continue
@@ -272,7 +269,7 @@ async function restoreContentField (client: MigrationClient): Promise<void> {
       if (ydoc.share.has('')) {
         yDocCopyXmlField(ydoc, '', 'content')
         if (ydoc.share.has('content')) {
-          await saveCollabYdoc(ctx, storage, dataId, collabId, ydoc)
+          await saveCollabYdoc(ctx, storage, client.wsIds, collabId, ydoc)
         } else {
           ctx.error('document content still not found', { document: document.title })
         }
@@ -13,15 +13,7 @@
 // limitations under the License.
 //
 import { saveCollabJson } from '@hcengineering/collaboration'
-import {
-  CollaborativeDoc,
-  Markup,
-  MeasureContext,
-  Blob as PlatformBlob,
-  Ref,
-  WorkspaceDataId,
-  WorkspaceIds
-} from '@hcengineering/core'
+import { CollaborativeDoc, Markup, MeasureContext, Blob as PlatformBlob, Ref, WorkspaceIds } from '@hcengineering/core'
 import type { StorageAdapter } from '@hcengineering/server-core'
 import { FileUploader, UploadResult } from './uploader'

@@ -34,16 +26,12 @@ export class StorageFileUploader implements FileUploader {
     this.uploadFile = this.uploadFile.bind(this)
   }

-  get dataId (): WorkspaceDataId {
-    return this.wsIds.dataId ?? (this.wsIds.uuid as unknown as WorkspaceDataId)
-  }
-
   public async uploadFile (id: string, blob: Blob): Promise<UploadResult> {
     try {
       const arrayBuffer = await blob.arrayBuffer()
       const buffer = Buffer.from(arrayBuffer)

-      await this.storageAdapter.put(this.ctx, this.dataId, id, buffer, blob.type, buffer.byteLength)
+      await this.storageAdapter.put(this.ctx, this.wsIds, id, buffer, blob.type, buffer.byteLength)
       return { success: true, id: id as Ref<PlatformBlob> }
     } catch (error) {
       return { success: false, error: error instanceof Error ? error.message : String(error) }
@@ -52,7 +40,7 @@ export class StorageFileUploader implements FileUploader {

   public async uploadCollaborativeDoc (collabId: CollaborativeDoc, content: Markup): Promise<UploadResult> {
     try {
-      const blobId = await saveCollabJson(this.ctx, this.storageAdapter, this.dataId, collabId, content)
+      const blobId = await saveCollabJson(this.ctx, this.storageAdapter, this.wsIds, collabId, content)
       return { success: true, id: blobId }
     } catch (error) {
       return { success: false, error: error instanceof Error ? error.message : String(error) }
@@ -13,7 +13,13 @@
 // limitations under the License.
 //

-import { type Blob, type MeasureContext, type StorageIterator, type WorkspaceDataId } from '@hcengineering/core'
+import {
+  type WorkspaceIds,
+  type Blob,
+  type MeasureContext,
+  type StorageIterator,
+  type WorkspaceDataId
+} from '@hcengineering/core'
 import { PlatformError, unknownError } from '@hcengineering/platform'
 import { type Readable } from 'stream'

@@ -35,37 +41,37 @@ export interface BucketInfo {
 }

 export interface StorageAdapter {
-  initialize: (ctx: MeasureContext, dataId: WorkspaceDataId) => Promise<void>
+  initialize: (ctx: MeasureContext, wsIds: WorkspaceIds) => Promise<void>

   close: () => Promise<void>

-  exists: (ctx: MeasureContext, dataId: WorkspaceDataId) => Promise<boolean>
-  make: (ctx: MeasureContext, dataId: WorkspaceDataId) => Promise<void>
-  delete: (ctx: MeasureContext, dataId: WorkspaceDataId) => Promise<void>
+  exists: (ctx: MeasureContext, wsIds: WorkspaceIds) => Promise<boolean>
+  make: (ctx: MeasureContext, wsIds: WorkspaceIds) => Promise<void>
+  delete: (ctx: MeasureContext, wsIds: WorkspaceIds) => Promise<void>

   listBuckets: (ctx: MeasureContext) => Promise<BucketInfo[]>
-  remove: (ctx: MeasureContext, dataId: WorkspaceDataId, objectNames: string[]) => Promise<void>
-  listStream: (ctx: MeasureContext, dataId: WorkspaceDataId) => Promise<BlobStorageIterator>
-  stat: (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string) => Promise<Blob | undefined>
-  get: (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string) => Promise<Readable>
+  remove: (ctx: MeasureContext, wsIds: WorkspaceIds, objectNames: string[]) => Promise<void>
+  listStream: (ctx: MeasureContext, wsIds: WorkspaceIds) => Promise<BlobStorageIterator>
+  stat: (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string) => Promise<Blob | undefined>
+  get: (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string) => Promise<Readable>
   put: (
     ctx: MeasureContext,
-    dataId: WorkspaceDataId,
+    wsIds: WorkspaceIds,
     objectName: string,
     stream: Readable | Buffer | string,
    contentType: string,
     size?: number
   ) => Promise<UploadedObjectInfo>
-  read: (ctx: MeasureContext, dataId: WorkspaceDataId, name: string) => Promise<Buffer[]>
+  read: (ctx: MeasureContext, wsIds: WorkspaceIds, name: string) => Promise<Buffer[]>
   partial: (
     ctx: MeasureContext,
-    dataId: WorkspaceDataId,
+    wsIds: WorkspaceIds,
     objectName: string,
     offset: number,
     length?: number
   ) => Promise<Readable>

-  getUrl: (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string) => Promise<string>
+  getUrl: (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string) => Promise<string>
 }

 export interface NamedStorageAdapter {
@@ -76,7 +82,7 @@ export interface NamedStorageAdapter {
 export interface StorageAdapterEx extends StorageAdapter {
   adapters?: NamedStorageAdapter[]

-  find: (ctx: MeasureContext, dataId: WorkspaceDataId) => StorageIterator
+  find: (ctx: MeasureContext, wsIds: WorkspaceIds) => StorageIterator
 }

 /**
@@ -84,19 +90,19 @@ export interface StorageAdapterEx extends StorageAdapter {
  */
 export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
   defaultAdapter: string = ''
-  async syncBlobFromStorage (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Blob> {
+  async syncBlobFromStorage (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Blob> {
     throw new PlatformError(unknownError('Method not implemented'))
   }

-  async initialize (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {}
+  async initialize (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {}

   async close (): Promise<void> {}

-  async exists (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<boolean> {
+  async exists (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<boolean> {
     return false
   }

-  find (ctx: MeasureContext, dataId: WorkspaceDataId): StorageIterator {
+  find (ctx: MeasureContext, wsIds: WorkspaceIds): StorageIterator {
     return {
       next: async (ctx) => [],
       close: async (ctx) => {}
@@ -107,17 +113,17 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
     return []
   }

-  async make (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {}
+  async make (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {}

-  async delete (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {}
+  async delete (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {}

-  async remove (ctx: MeasureContext, dataId: WorkspaceDataId, objectNames: string[]): Promise<void> {}
+  async remove (ctx: MeasureContext, wsIds: WorkspaceIds, objectNames: string[]): Promise<void> {}

-  async list (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<ListBlobResult[]> {
+  async list (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<ListBlobResult[]> {
     return []
   }

-  async listStream (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<BlobStorageIterator> {
+  async listStream (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<BlobStorageIterator> {
     return {
       next: async (): Promise<ListBlobResult[]> => {
         return []
@@ -126,17 +132,17 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
      }
    }

-  async stat (ctx: MeasureContext, dataId: WorkspaceDataId, name: string): Promise<Blob | undefined> {
+  async stat (ctx: MeasureContext, wsIds: WorkspaceIds, name: string): Promise<Blob | undefined> {
     return undefined
   }

-  async get (ctx: MeasureContext, dataId: WorkspaceDataId, name: string): Promise<Readable> {
+  async get (ctx: MeasureContext, wsIds: WorkspaceIds, name: string): Promise<Readable> {
     throw new Error('not implemented')
   }

   async partial (
     ctx: MeasureContext,
-    dataId: WorkspaceDataId,
+    wsIds: WorkspaceIds,
     objectName: string,
     offset: number,
     length?: number | undefined
@@ -144,13 +150,13 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
     throw new Error('not implemented')
   }

-  async read (ctx: MeasureContext, dataId: WorkspaceDataId, name: string): Promise<Buffer[]> {
+  async read (ctx: MeasureContext, wsIds: WorkspaceIds, name: string): Promise<Buffer[]> {
     throw new Error('not implemented')
   }

   async put (
     ctx: MeasureContext,
-    dataId: WorkspaceDataId,
+    wsIds: WorkspaceIds,
     objectName: string,
     stream: string | Readable | Buffer,
     contentType: string,
@@ -159,7 +165,7 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
     throw new Error('not implemented')
   }

-  async getUrl (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<string> {
+  async getUrl (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<string> {
     throw new Error('not implemented')
   }
 }
@@ -171,11 +177,11 @@ export function createDummyStorageAdapter (): StorageAdapter {
 export async function removeAllObjects (
   ctx: MeasureContext,
   storage: StorageAdapter,
-  dataId: WorkspaceDataId
+  wsIds: WorkspaceIds
 ): Promise<void> {
-  ctx.warn('removing all objects from workspace', { dataId })
+  ctx.warn('removing all objects from workspace', wsIds)
   // We need to list all files and delete them
-  const iterator = await storage.listStream(ctx, dataId)
+  const iterator = await storage.listStream(ctx, wsIds)
   let bulk: string[] = []
   while (true) {
     const objs = await iterator.next()
@@ -185,13 +191,13 @@ export async function removeAllObjects (
     for (const obj of objs) {
       bulk.push(obj._id)
       if (bulk.length > 50) {
-        await storage.remove(ctx, dataId, bulk)
+        await storage.remove(ctx, wsIds, bulk)
         bulk = []
       }
     }
   }
   if (bulk.length > 0) {
-    await storage.remove(ctx, dataId, bulk)
+    await storage.remove(ctx, wsIds, bulk)
     bulk = []
   }
   await iterator.close()
@@ -200,10 +206,10 @@
 export async function objectsToArray (
   ctx: MeasureContext,
   storage: StorageAdapter,
-  dataId: WorkspaceDataId
+  wsIds: WorkspaceIds
 ): Promise<ListBlobResult[]> {
   // We need to list all files and delete them
-  const iterator = await storage.listStream(ctx, dataId)
+  const iterator = await storage.listStream(ctx, wsIds)
   const bulk: ListBlobResult[] = []
   while (true) {
     const obj = await iterator.next()
@@ -215,3 +221,7 @@ export async function objectsToArray (
   await iterator.close()
   return bulk
 }
+
+export function getDataId (wsIds: WorkspaceIds): WorkspaceDataId {
+  return wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)
+}
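With the interface above now taking WorkspaceIds everywhere, a typical caller migrates roughly like this; the variable names are illustrative, only the call shapes come from the diff:

// Hypothetical caller of the new StorageAdapter API.
const wsIds: WorkspaceIds = { uuid: workspaceUuid, dataId: workspaceDataId, url: '' }
await storageAdapter.put(ctx, wsIds, blobId, buffer, 'application/json', buffer.length)

// Adapters that still key buckets by the legacy id can fall back explicitly:
const bucketKey = getDataId(wsIds)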
@@ -37,8 +37,7 @@ import core, {
   TxRemoveDoc,
   TxUpdateDoc,
   Type,
-  type MeasureContext,
-  type WorkspaceDataId
+  type MeasureContext
 } from '@hcengineering/core'
 import notification, { CommonInboxNotification, MentionInboxNotification } from '@hcengineering/notification'
 import { StorageAdapter, TriggerControl } from '@hcengineering/server-core'
@@ -402,8 +401,7 @@ async function getCreateReferencesTxes (
     const blobId = (createdDoc as any)[attr.name] as Ref<Blob>
     if (blobId != null) {
       try {
-        const dataId = control.workspace.dataId ?? (control.workspace.uuid as unknown as WorkspaceDataId)
-        const buffer = await storage.read(ctx, dataId, blobId)
+        const buffer = await storage.read(ctx, control.workspace, blobId)
         const markup = Buffer.concat(buffer as any).toString()
         const attrReferences = getReferencesData(srcDocId, srcDocClass, attachedDocId, attachedDocClass, markup)
         refs.push(...attrReferences)
@@ -450,8 +448,7 @@ async function getUpdateReferencesTxes (
       try {
         const blobId = (updatedDoc as any)[attr.name] as Ref<Blob>
         if (blobId != null) {
-          const dataId = control.workspace.dataId ?? (control.workspace.uuid as unknown as WorkspaceDataId)
-          const buffer = await storage.read(ctx, dataId, blobId)
+          const buffer = await storage.read(ctx, control.workspace, blobId)
           const markup = Buffer.concat(buffer as any).toString()
           const attrReferences = getReferencesData(srcDocId, srcDocClass, attachedDocId, attachedDocClass, markup)
           references.push(...attrReferences)
@@ -15,7 +15,7 @@
 //

 import attachment, { type Attachment } from '@hcengineering/attachment'
-import type { Tx, TxRemoveDoc, WorkspaceDataId } from '@hcengineering/core'
+import type { Tx, TxRemoveDoc } from '@hcengineering/core'
 import type { TriggerControl } from '@hcengineering/server-core'

 /**
@@ -45,8 +45,7 @@ export async function OnAttachmentDelete (
     }
   }
   if (toDelete.length > 0) {
-    const dataId = workspace.dataId ?? (workspace.uuid as unknown as WorkspaceDataId)
-    await storageAdapter.remove(ctx, dataId, toDelete)
+    await storageAdapter.remove(ctx, workspace, toDelete)
   }

   return result
@@ -13,7 +13,7 @@
 // limitations under the License.
 //

-import type { CollaborativeDoc, Doc, Tx, TxRemoveDoc, WorkspaceDataId } from '@hcengineering/core'
+import type { CollaborativeDoc, Doc, Tx, TxRemoveDoc } from '@hcengineering/core'
 import core, { makeCollabId, makeCollabYdocId } from '@hcengineering/core'
 import { type TriggerControl } from '@hcengineering/server-core'

@@ -49,8 +49,7 @@ export async function OnDelete (
   const toRemove: string[] = toDelete.map(makeCollabYdocId)
   if (toRemove.length > 0) {
     await ctx.with('remove', {}, async () => {
-      const dataId = workspace.dataId ?? (workspace.uuid as unknown as WorkspaceDataId)
-      await storageAdapter.remove(ctx, dataId, toRemove)
+      await storageAdapter.remove(ctx, workspace, toRemove)
     })
   }
 }
@@ -22,8 +22,7 @@ import {
   type TxRemoveDoc,
   DocumentQuery,
   FindOptions,
-  FindResult,
-  type WorkspaceDataId
+  FindResult
 } from '@hcengineering/core'
 import drive, { type FileVersion, type Folder } from '@hcengineering/drive'
 import type { TriggerControl } from '@hcengineering/server-core'
@@ -45,8 +44,7 @@ export async function OnFileVersionDelete (
     }
   }
   if (toDelete.length > 0) {
-    const dataId = workspace.dataId ?? (workspace.uuid as unknown as WorkspaceDataId)
-    await storageAdapter.remove(ctx, dataId, toDelete)
+    await storageAdapter.remove(ctx, workspace, toDelete)
   }

   return result
@@ -25,8 +25,7 @@ import {
   Ref,
   Tx,
   TxCreateDoc,
-  TxProcessor,
-  type WorkspaceDataId
+  TxProcessor
 } from '@hcengineering/core'
 import notification, {
   ActivityInboxNotification,
@@ -179,8 +178,7 @@ export async function createPushNotification (
   const provider = getAvatarProviderId(senderAvatar.avatarType)
   if (provider === contact.avatarProvider.Image) {
     if (senderAvatar.avatar != null) {
-      const dataId = control.workspace.dataId ?? (control.workspace.uuid as unknown as WorkspaceDataId)
-      const url = await control.storageAdapter.getUrl(control.ctx, dataId, senderAvatar.avatar)
+      const url = await control.storageAdapter.getUrl(control.ctx, control.workspace, senderAvatar.avatar)
       data.icon = url.includes('://') ? url : concatLink(front, url)
     }
   } else if (provider === contact.avatarProvider.Gravatar && senderAvatar.avatarProps?.url !== undefined) {
@@ -30,7 +30,8 @@ import core, {
   type Tx,
   type WorkspaceIds,
   type WorkspaceInfoWithStatus,
-  WorkspaceDataId
+  WorkspaceDataId,
+  WorkspaceUuid
 } from '@hcengineering/core'
 import {
   wrapPipeline,
@@ -264,13 +265,13 @@ class BackupWorker {
     const ctx = rootCtx.newChild(ws.uuid, { workspace: ws.uuid, url: ws.url })
     const dataId = ws.dataId ?? (ws.uuid as unknown as WorkspaceDataId)
     let pipeline: Pipeline | undefined
+    const backupIds = {
+      uuid: this.config.BucketName as WorkspaceUuid,
+      dataId: this.config.BucketName as WorkspaceDataId,
+      url: ''
+    }
     try {
-      const storage = await createStorageBackupStorage(
-        ctx,
-        this.storageAdapter,
-        this.config.BucketName as WorkspaceDataId,
-        dataId
-      )
+      const storage = await createStorageBackupStorage(ctx, this.storageAdapter, backupIds, dataId)
       const wsIds: WorkspaceIds = {
         uuid: ws.uuid,
         dataId: ws.dataId,
@@ -442,7 +443,8 @@ export async function doRestoreWorkspace (
   const ctx = rootCtx.newChild(wsIds.uuid, { workspace: wsIds.uuid })
   let pipeline: Pipeline | undefined
   try {
-    const storage = await createStorageBackupStorage(ctx, backupAdapter, bucketName as WorkspaceDataId, wsIds.uuid)
+    const restoreIds = { uuid: bucketName as WorkspaceUuid, dataId: bucketName as WorkspaceDataId, url: '' }
+    const storage = await createStorageBackupStorage(ctx, backupAdapter, restoreIds, wsIds.uuid)
     const result: boolean = await ctx.with('restore', { workspace: wsIds.uuid }, (ctx) =>
       restore(ctx, '', wsIds, storage, {
         date: -1,
@@ -1,4 +1,4 @@
-import { MeasureContext, type WorkspaceDataId } from '@hcengineering/core'
+import { MeasureContext, type WorkspaceIds } from '@hcengineering/core'
 import { StorageAdapter } from '@hcengineering/server-core'
 import { createReadStream, createWriteStream, existsSync, statSync } from 'fs'
 import { mkdir, readFile, rm, writeFile } from 'fs/promises'
@@ -66,29 +66,29 @@ class FileStorage implements BackupStorage {
 class AdapterStorage implements BackupStorage {
   constructor (
     readonly client: StorageAdapter,
-    readonly workspaceId: WorkspaceDataId,
+    readonly wsIds: WorkspaceIds,
     readonly root: string,
     readonly ctx: MeasureContext
   ) {}

   async loadFile (name: string): Promise<Buffer> {
-    const data = await this.client.read(this.ctx, this.workspaceId, join(this.root, name))
+    const data = await this.client.read(this.ctx, this.wsIds, join(this.root, name))
     return Buffer.concat(data as any)
   }

   async write (name: string): Promise<Writable> {
     const wr = new PassThrough()
-    void this.client.put(this.ctx, this.workspaceId, join(this.root, name), wr, 'application/octet-stream')
+    void this.client.put(this.ctx, this.wsIds, join(this.root, name), wr, 'application/octet-stream')
     return wr
   }

   async load (name: string): Promise<Readable> {
-    return await this.client.get(this.ctx, this.workspaceId, join(this.root, name))
+    return await this.client.get(this.ctx, this.wsIds, join(this.root, name))
   }

   async exists (name: string): Promise<boolean> {
     try {
-      return (await this.client.stat(this.ctx, this.workspaceId, join(this.root, name))) !== undefined
+      return (await this.client.stat(this.ctx, this.wsIds, join(this.root, name))) !== undefined
     } catch (err: any) {
       return false
     }
@@ -96,7 +96,7 @@ class AdapterStorage implements BackupStorage {

   async stat (name: string): Promise<number> {
     try {
-      const st = await this.client.stat(this.ctx, this.workspaceId, join(this.root, name))
+      const st = await this.client.stat(this.ctx, this.wsIds, join(this.root, name))
       return st?.size ?? 0
     } catch (err: any) {
       return 0
@@ -104,12 +104,12 @@
   }

   async delete (name: string): Promise<void> {
-    await this.client.remove(this.ctx, this.workspaceId, [join(this.root, name)])
+    await this.client.remove(this.ctx, this.wsIds, [join(this.root, name)])
   }

   async writeFile (name: string, data: string | Buffer | Readable): Promise<void> {
     // TODO: add mime type detection here.
-    await this.client.put(this.ctx, this.workspaceId, join(this.root, name), data, 'application/octet-stream')
+    await this.client.put(this.ctx, this.wsIds, join(this.root, name), data, 'application/octet-stream')
   }
 }

@@ -130,11 +130,11 @@ export async function createFileBackupStorage (fileName: string): Promise<Backup
 export async function createStorageBackupStorage (
   ctx: MeasureContext,
   client: StorageAdapter,
-  workspaceId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   root: string
 ): Promise<BackupStorage> {
-  if (!(await client.exists(ctx, workspaceId))) {
-    await client.make(ctx, workspaceId)
+  if (!(await client.exists(ctx, wsIds))) {
+    await client.make(ctx, wsIds)
   }
-  return new AdapterStorage(client, workspaceId, root, ctx)
+  return new AdapterStorage(client, wsIds, root, ctx)
 }
@@ -13,7 +13,7 @@
 // limitations under the License.
 //

-import { type WorkspaceDataId, type WorkspaceIds, type MeasureContext } from '@hcengineering/core'
+import { type WorkspaceIds, type MeasureContext } from '@hcengineering/core'
 import type { StorageAdapter } from '@hcengineering/server-core'
 import { Buffer } from 'node:buffer'

@@ -38,13 +38,9 @@ export class BlobClient {
     this.transactorAPIUrl = url.replaceAll('wss://', 'https://').replace('ws://', 'http://') + '/api/v1/blob'
   }

-  get workspaceDataId (): WorkspaceDataId {
-    return this.workspace.dataId ?? (this.workspace.uuid as unknown as WorkspaceDataId)
-  }
-
   async checkFile (ctx: MeasureContext, name: string): Promise<boolean> {
     if (this.opt?.storageAdapter !== undefined) {
-      const obj = await this.opt?.storageAdapter.stat(ctx, this.workspaceDataId, name)
+      const obj = await this.opt?.storageAdapter.stat(ctx, this.workspace, name)
       if (obj !== undefined) {
         return true
       }
@@ -96,7 +92,7 @@ export class BlobClient {

       if (this.opt?.storageAdapter !== undefined) {
         const chunks: Buffer[] = []
-        const readable = await this.opt.storageAdapter.partial(ctx, this.workspaceDataId, name, written, chunkSize)
+        const readable = await this.opt.storageAdapter.partial(ctx, this.workspace, name, written, chunkSize)
         await new Promise<void>((resolve) => {
           readable.on('data', (chunk) => {
             chunks.push(chunk)
@@ -123,12 +119,12 @@ export class BlobClient {
         if (response.status === 403) {
           i = 5
           // No file, so make it empty
-          throw new Error(`Unauthorized ${this.transactorAPIUrl}/${this.workspaceDataId}/${name}`)
+          throw new Error(`Unauthorized ${this.transactorAPIUrl}/${this.workspace.uuid}/${name}`)
         }
         if (response.status === 404) {
           i = 5
           // No file, so make it empty
-          throw new Error(`No file for ${this.transactorAPIUrl}/${this.workspaceDataId}/${name}`)
+          throw new Error(`No file for ${this.transactorAPIUrl}/${this.workspace.uuid}/${name}`)
         }
         if (response.status === 416) {
           if (size === -1) {
@@ -137,7 +133,7 @@ export class BlobClient {
         }

         // No file, so make it empty
-        throw new Error(`No file for ${this.transactorAPIUrl}/${this.workspaceDataId}/${name}`)
+        throw new Error(`No file for ${this.transactorAPIUrl}/${this.workspace.uuid}/${name}`)
       }
       chunk = Buffer.from(await response.arrayBuffer())

@@ -205,7 +201,7 @@ export class BlobClient {

   async upload (ctx: MeasureContext, name: string, size: number, contentType: string, buffer: Buffer): Promise<void> {
     if (this.opt?.storageAdapter !== undefined) {
-      await this.opt.storageAdapter.put(ctx, this.workspaceDataId, name, buffer, contentType, size)
+      await this.opt.storageAdapter.put(ctx, this.workspace, name, buffer, contentType, size)
     } else {
       // TODO: We need to improve this logig, to allow restore of huge blobs
       for (let i = 0; i < 5; i++) {
@@ -17,7 +17,7 @@ import {
   type Blob,
   type CollaborativeDoc,
   type Ref,
-  type WorkspaceDataId,
+  type WorkspaceIds,
   Markup,
   MarkupBlobRef,
   MeasureContext,
@@ -35,12 +35,12 @@ import { yDocFromBuffer, yDocToBuffer } from './ydoc'
 export async function loadCollabYdoc (
   ctx: MeasureContext,
   storageAdapter: StorageAdapter,
-  workspace: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   doc: CollaborativeDoc | MarkupBlobRef
 ): Promise<YDoc | undefined> {
   const blobId = typeof doc === 'string' ? doc : makeCollabYdocId(doc)

-  const blob = await storageAdapter.stat(ctx, workspace, blobId)
+  const blob = await storageAdapter.stat(ctx, wsIds, blobId)
   if (blob === undefined) {
     return undefined
   }
@@ -53,7 +53,7 @@ export async function loadCollabYdoc (
   // it is either already gc-ed, or gc not needed and it is disabled
   const ydoc = new YDoc({ guid: generateId(), gc: false })

-  const buffer = await storageAdapter.read(ctx, workspace, blobId)
+  const buffer = await storageAdapter.read(ctx, wsIds, blobId)
   return yDocFromBuffer(Buffer.concat(buffer as any), ydoc)
 }

@@ -61,14 +61,14 @@ export async function loadCollabYdoc (
 export async function saveCollabYdoc (
   ctx: MeasureContext,
   storageAdapter: StorageAdapter,
-  workspace: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   doc: CollaborativeDoc | MarkupBlobRef,
   ydoc: YDoc
 ): Promise<Ref<Blob>> {
   const blobId = typeof doc === 'string' ? doc : makeCollabYdocId(doc)

   const buffer = yDocToBuffer(ydoc)
-  await storageAdapter.put(ctx, workspace, blobId, buffer, 'application/ydoc', buffer.length)
+  await storageAdapter.put(ctx, wsIds, blobId, buffer, 'application/ydoc', buffer.length)

   return blobId
 }
@@ -76,14 +76,14 @@ export async function saveCollabYdoc (
 /** @public */
 export async function removeCollabYdoc (
   storageAdapter: StorageAdapter,
-  workspace: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   collaborativeDocs: CollaborativeDoc[],
   ctx: MeasureContext
 ): Promise<void> {
   const toRemove: string[] = collaborativeDocs.map(makeCollabYdocId)
   if (toRemove.length > 0) {
     await ctx.with('remove', {}, async () => {
-      await storageAdapter.remove(ctx, workspace, toRemove)
+      await storageAdapter.remove(ctx, wsIds, toRemove)
     })
   }
 }
@@ -92,10 +92,10 @@ export async function removeCollabYdoc (
 export async function loadCollabJson (
   ctx: MeasureContext,
   storageAdapter: StorageAdapter,
-  workspace: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   blobId: Ref<Blob>
 ): Promise<Markup | undefined> {
-  const blob = await storageAdapter.stat(ctx, workspace, blobId)
+  const blob = await storageAdapter.stat(ctx, wsIds, blobId)
   if (blob === undefined) {
     return undefined
   }
@@ -105,7 +105,7 @@ export async function loadCollabJson (
     return undefined
   }

-  const buffer = await storageAdapter.read(ctx, workspace, blobId)
+  const buffer = await storageAdapter.read(ctx, wsIds, blobId)
   return Buffer.concat(buffer as any).toString()
 }

@@ -113,7 +113,7 @@ export async function loadCollabJson (
 export async function saveCollabJson (
   ctx: MeasureContext,
   storageAdapter: StorageAdapter,
-  workspace: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   doc: CollaborativeDoc,
   content: Markup | YDoc
 ): Promise<Ref<Blob>> {
@@ -121,7 +121,7 @@ export async function saveCollabJson (

   const markup = typeof content === 'string' ? content : yDocToMarkup(content, doc.objectAttr)
   const buffer = Buffer.from(markup)
-  await storageAdapter.put(ctx, workspace, blobId, buffer, 'application/json', buffer.length)
+  await storageAdapter.put(ctx, wsIds, blobId, buffer, 'application/json', buffer.length)

   return blobId
 }
@ -13,15 +13,14 @@
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
import { Blob, Ref, generateId, WorkspaceUuid, WorkspaceDataId } from '@hcengineering/core'
|
||||
import { Blob, Ref, generateId, type WorkspaceIds } from '@hcengineering/core'
|
||||
import { decodeToken } from '@hcengineering/server-token'
|
||||
import { onAuthenticatePayload } from '@hocuspocus/server'
|
||||
import { ClientFactory, simpleClientFactory } from './platform'
|
||||
|
||||
export interface Context {
|
||||
connectionId: string
|
||||
workspaceId: WorkspaceUuid
|
||||
workspaceDataId: WorkspaceDataId
|
||||
wsIds: WorkspaceIds
|
||||
clientFactory: ClientFactory
|
||||
|
||||
content?: Ref<Blob>
|
||||
@ -35,7 +34,7 @@ export type withContext<T extends WithContext> = Omit<T, 'context'> & {
|
||||
context: Context
|
||||
}
|
||||
|
||||
export function buildContext (data: onAuthenticatePayload, wsDataId?: WorkspaceDataId): Context {
|
||||
export function buildContext (data: onAuthenticatePayload, wsIds: WorkspaceIds): Context {
|
||||
const context = data.context as Partial<Context>
|
||||
|
||||
const connectionId = context.connectionId ?? generateId()
|
||||
@ -45,8 +44,7 @@ export function buildContext (data: onAuthenticatePayload, wsDataId?: WorkspaceD
|
||||
|
||||
return {
|
||||
connectionId,
|
||||
workspaceId: decodedToken.workspace,
|
||||
workspaceDataId: wsDataId ?? (decodedToken.workspace as unknown as WorkspaceDataId),
|
||||
wsIds,
|
||||
clientFactory: simpleClientFactory(decodedToken),
|
||||
content
|
||||
}
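
With the whole bundle stored on Context, downstream handlers can destructure it once, log the uuid, and hand the full object to storage. A sketch of the consuming pattern used later in this diff (illustrative handler body, not part of the commit):

const { wsIds } = context
ctx.info('load document', { workspace: wsIds.uuid })
// const ydoc = await loadCollabYdoc(ctx, storage, wsIds, documentId)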
|
@ -49,7 +49,7 @@ export class AuthenticationExtension implements Extension {
throw new Error('documentName must include workspace id')
}

return buildContext(data, ids.dataId)
return buildContext(data, ids)
})
}
}
|
@ -19,7 +19,7 @@ import {
decodeDocumentId
} from '@hcengineering/collaborator-client'
import { saveCollabJson } from '@hcengineering/collaboration'
import { type Blob, type Ref, MeasureContext, type WorkspaceDataId } from '@hcengineering/core'
import { type Blob, type Ref, MeasureContext } from '@hcengineering/core'
import { Context } from '../../context'
import { RpcMethodParams } from '../rpc'

@ -37,12 +37,11 @@ export async function createContent (
throw new Error(`Document ${documentName} already exists`)
}

const { documentId, workspaceId } = decodeDocumentId(documentName)
const { documentId } = decodeDocumentId(documentName)

const result: Record<string, Ref<Blob>> = {}
const dataId = context.workspaceDataId ?? (workspaceId as WorkspaceDataId)
for (const [field, markup] of Object.entries(content)) {
const blob = await saveCollabJson(ctx, storageAdapter, dataId, documentId, markup)
const blob = await saveCollabJson(ctx, storageAdapter, context.wsIds, documentId, markup)
result[field] = blob
}

|
@ -14,7 +14,7 @@
//

import { Analytics } from '@hcengineering/analytics'
import { MeasureContext, type WorkspaceDataId, generateId, metricsAggregate } from '@hcengineering/core'
import { MeasureContext, generateId, metricsAggregate } from '@hcengineering/core'
import type { StorageAdapter } from '@hcengineering/server-core'
import { Token, decodeToken } from '@hcengineering/server-token'
import { Hocuspocus } from '@hocuspocus/server'
@ -105,12 +105,11 @@ export async function start (ctx: MeasureContext, config: Config, storageAdapter
const rpcCtx = ctx.newChild('rpc', {})

const getContext = async (rawToken: string, token: Token): Promise<Context> => {
const ids = await getWorkspaceIds(rawToken)
const wsIds = await getWorkspaceIds(rawToken)

return {
connectionId: generateId(),
workspaceId: ids.uuid,
workspaceDataId: ids.dataId ?? (ids.uuid as unknown as WorkspaceDataId),
wsIds,
clientFactory: simpleClientFactory(token)
}
}
|
@ -30,7 +30,7 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {
constructor (private readonly storage: StorageAdapter) {}

async loadDocument (ctx: MeasureContext, documentName: string, context: Context): Promise<YDoc | undefined> {
const { content, workspaceDataId } = context
const { content, wsIds } = context
const { documentId } = decodeDocumentId(documentName)

// try to load document content
@ -39,7 +39,7 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {

const ydoc = await ctx.with('loadCollabYdoc', {}, (ctx) => {
return withRetry(ctx, 5, () => {
return loadCollabYdoc(ctx, this.storage, workspaceDataId, documentId)
return loadCollabYdoc(ctx, this.storage, wsIds, documentId)
})
})

@ -59,7 +59,7 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {

const markup = await ctx.with('loadCollabJson', {}, (ctx) => {
return withRetry(ctx, 5, () => {
return loadCollabJson(ctx, this.storage, workspaceDataId, content)
return loadCollabJson(ctx, this.storage, wsIds, content)
})
})
if (markup !== undefined) {
@ -67,7 +67,7 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {

// if document was loaded from the initial content or storage we need to save
// it to ensure the next time we load it from the ydoc document
await saveCollabYdoc(ctx, this.storage, workspaceDataId, documentId, ydoc)
await saveCollabYdoc(ctx, this.storage, wsIds, documentId, ydoc)

return ydoc
}
@ -92,14 +92,14 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {
curr: () => Record<string, string>
}
): Promise<Record<string, string> | undefined> {
const { clientFactory, workspaceDataId } = context
const { clientFactory, wsIds } = context
const { documentId } = decodeDocumentId(documentName)

try {
ctx.info('save document ydoc content', { documentName })
await ctx.with('saveCollabYdoc', {}, (ctx) => {
return withRetry(ctx, 5, () => {
return saveCollabYdoc(ctx, this.storage, workspaceDataId, documentId, document)
return saveCollabYdoc(ctx, this.storage, wsIds, documentId, document)
})
})
} catch (err: any) {
@ -139,7 +139,7 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {
curr: () => Record<string, string>
}
): Promise<Record<string, string> | undefined> {
const { workspaceDataId } = context
const { wsIds } = context
const { documentId } = decodeDocumentId(documentName)
const { objectAttr, objectClass, objectId } = documentId

@ -179,7 +179,7 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {

const blobId = await ctx.with('saveCollabJson', {}, (ctx) => {
return withRetry(ctx, 5, () => {
return saveCollabJson(ctx, this.storage, workspaceDataId, documentId, markup.curr[objectAttr])
return saveCollabJson(ctx, this.storage, wsIds, documentId, markup.curr[objectAttr])
})
})

|
@ -13,7 +13,7 @@
// limitations under the License.
//

import { type MeasureContext, type WorkspaceDataId, concatLink } from '@hcengineering/core'
import { type MeasureContext, type WorkspaceUuid, concatLink } from '@hcengineering/core'
import FormData from 'form-data'
import fetch, { type RequestInfo, type RequestInit, type Response } from 'node-fetch'
import { Readable } from 'stream'
@ -89,14 +89,14 @@ export class DatalakeClient {
this.headers = { Authorization: 'Bearer ' + token }
}

getObjectUrl (ctx: MeasureContext, wsDataId: WorkspaceDataId, objectName: string): string {
const path = `/blob/${wsDataId}/${encodeURIComponent(objectName)}`
getObjectUrl (ctx: MeasureContext, workspace: WorkspaceUuid, objectName: string): string {
const path = `/blob/${workspace}/${encodeURIComponent(objectName)}`
return concatLink(this.endpoint, path)
}
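
Note the addressing change this implies: datalake blob URLs are now keyed by the workspace uuid rather than the data id. A sketch of the resulting URL shape (endpoint is whatever the client was constructed with):

// getObjectUrl(ctx, workspace, 'photo.png') now resolves to:
//   `${endpoint}/blob/${workspace}/photo.png`   // workspace is a WorkspaceUuid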

async listObjects (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
cursor: string | undefined,
limit: number = 100
): Promise<ListObjectOutput> {
@ -111,7 +111,7 @@ export class DatalakeClient {
return (await response.json()) as ListObjectOutput
}

async getObject (ctx: MeasureContext, workspace: WorkspaceDataId, objectName: string): Promise<Readable> {
async getObject (ctx: MeasureContext, workspace: WorkspaceUuid, objectName: string): Promise<Readable> {
const url = this.getObjectUrl(ctx, workspace, objectName)

let response
@ -134,7 +134,7 @@ export class DatalakeClient {

async getPartialObject (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
offset: number,
length?: number
@ -165,7 +165,7 @@ export class DatalakeClient {

async statObject (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string
): Promise<StatObjectOutput | undefined> {
const url = this.getObjectUrl(ctx, workspace, objectName)
@ -196,7 +196,7 @@ export class DatalakeClient {
}
}

async deleteObject (ctx: MeasureContext, workspace: WorkspaceDataId, objectName: string): Promise<void> {
async deleteObject (ctx: MeasureContext, workspace: WorkspaceUuid, objectName: string): Promise<void> {
const url = this.getObjectUrl(ctx, workspace, objectName)
try {
await fetchSafe(ctx, url, {
@ -213,7 +213,7 @@ export class DatalakeClient {

async putObject (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
stream: Readable | Buffer | string,
params: UploadObjectParams
@ -248,7 +248,7 @@ export class DatalakeClient {

async uploadWithFormData (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
stream: Readable | Buffer | string,
params: UploadObjectParams
@ -289,7 +289,7 @@ export class DatalakeClient {

async uploadMultipart (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
stream: Readable | Buffer | string,
params: UploadObjectParams
@ -317,7 +317,7 @@ export class DatalakeClient {

async uploadWithSignedURL (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
stream: Readable | Buffer | string,
params: UploadObjectParams
@ -348,7 +348,7 @@ export class DatalakeClient {

async uploadFromS3 (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
params: {
url: string
@ -371,7 +371,7 @@ export class DatalakeClient {

// R2

async getR2UploadParams (ctx: MeasureContext, workspace: WorkspaceDataId): Promise<R2UploadParams> {
async getR2UploadParams (ctx: MeasureContext, workspace: WorkspaceUuid): Promise<R2UploadParams> {
const path = `/upload/r2/${workspace}`
const url = concatLink(this.endpoint, path)

@ -382,7 +382,7 @@ export class DatalakeClient {

async uploadFromR2 (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
params: {
filename: string
@ -403,7 +403,7 @@ export class DatalakeClient {

// Signed URL

private async signObjectSign (ctx: MeasureContext, workspace: WorkspaceDataId, objectName: string): Promise<string> {
private async signObjectSign (ctx: MeasureContext, workspace: WorkspaceUuid, objectName: string): Promise<string> {
try {
const url = this.getSignObjectUrl(workspace, objectName)
const response = await fetchSafe(ctx, url, { method: 'POST', headers: { ...this.headers } })
@ -416,7 +416,7 @@ export class DatalakeClient {

private async signObjectComplete (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string
): Promise<ObjectMetadata> {
try {
@ -429,7 +429,7 @@ export class DatalakeClient {
}
}

private async signObjectDelete (ctx: MeasureContext, workspace: WorkspaceDataId, objectName: string): Promise<void> {
private async signObjectDelete (ctx: MeasureContext, workspace: WorkspaceUuid, objectName: string): Promise<void> {
try {
const url = this.getSignObjectUrl(workspace, objectName)
await fetchSafe(ctx, url, { method: 'DELETE', headers: { ...this.headers } })
@ -439,7 +439,7 @@ export class DatalakeClient {
}
}

private getSignObjectUrl (workspace: WorkspaceDataId, objectName: string): string {
private getSignObjectUrl (workspace: WorkspaceUuid, objectName: string): string {
const path = `/upload/signed-url/${workspace}/${encodeURIComponent(objectName)}`
return concatLink(this.endpoint, path)
}
@ -448,7 +448,7 @@ export class DatalakeClient {

private async multipartUploadStart (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
params: UploadObjectParams
): Promise<MultipartUpload> {
@ -472,7 +472,7 @@ export class DatalakeClient {

private async multipartUploadPart (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
multipart: MultipartUpload,
partNumber: number,
@ -499,7 +499,7 @@ export class DatalakeClient {

private async multipartUploadComplete (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
multipart: MultipartUpload,
parts: MultipartUploadPart[]
@ -524,7 +524,7 @@ export class DatalakeClient {

private async multipartUploadAbort (
ctx: MeasureContext,
workspace: WorkspaceDataId,
workspace: WorkspaceUuid,
objectName: string,
multipart: MultipartUpload
): Promise<void> {
|
@ -17,7 +17,7 @@ import core, {
type Blob,
type MeasureContext,
type Ref,
type WorkspaceDataId,
type WorkspaceIds,
systemAccountUuid,
withContext
} from '@hcengineering/core'
@ -64,17 +64,17 @@ export class DatalakeService implements StorageAdapter {
this.client = createDatalakeClient(opt, token)
}

async initialize (ctx: MeasureContext, workspaceId: WorkspaceDataId): Promise<void> {}
async initialize (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {}

async close (): Promise<void> {}

async exists (ctx: MeasureContext, workspaceId: WorkspaceDataId): Promise<boolean> {
async exists (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<boolean> {
// workspace/buckets not supported, assume that always exist
return true
}

@withContext('make')
async make (ctx: MeasureContext, workspaceId: WorkspaceDataId): Promise<void> {
async make (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {
// workspace/buckets not supported, assume that always exist
}

@ -83,21 +83,21 @@ export class DatalakeService implements StorageAdapter {
}

@withContext('remove')
async remove (ctx: MeasureContext, workspaceId: WorkspaceDataId, objectNames: string[]): Promise<void> {
async remove (ctx: MeasureContext, wsIds: WorkspaceIds, objectNames: string[]): Promise<void> {
await Promise.all(
objectNames.map(async (objectName) => {
await this.client.deleteObject(ctx, workspaceId, objectName)
await this.client.deleteObject(ctx, wsIds.uuid, objectName)
})
)
}

@withContext('delete')
async delete (ctx: MeasureContext, workspaceId: WorkspaceDataId): Promise<void> {
async delete (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {
// not supported, just do nothing and pretend we deleted the workspace
}

@withContext('listStream')
async listStream (ctx: MeasureContext, workspaceId: WorkspaceDataId): Promise<BlobStorageIterator> {
async listStream (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<BlobStorageIterator> {
let hasMore = true
const buffer: ListBlobResult[] = []
let cursor: string | undefined
@ -106,7 +106,7 @@ export class DatalakeService implements StorageAdapter {
next: async () => {
try {
while (hasMore && buffer.length < 50) {
const res = await this.client.listObjects(ctx, workspaceId, cursor)
const res = await this.client.listObjects(ctx, wsIds.uuid, cursor)
hasMore = res.cursor !== undefined
cursor = res.cursor

@ -124,7 +124,7 @@ export class DatalakeService implements StorageAdapter {
}
}
} catch (err: any) {
ctx.error('Failed to get list', { error: err, workspaceId })
ctx.error('Failed to get list', { error: err, workspace: wsIds.uuid })
}
return buffer.splice(0, 50)
},
@ -133,9 +133,9 @@ export class DatalakeService implements StorageAdapter {
}

@withContext('stat')
async stat (ctx: MeasureContext, workspaceId: WorkspaceDataId, objectName: string): Promise<Blob | undefined> {
async stat (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Blob | undefined> {
try {
const result = await this.client.statObject(ctx, workspaceId, objectName)
const result = await this.client.statObject(ctx, wsIds.uuid, objectName)
if (result !== undefined) {
return {
provider: '',
@ -151,19 +151,19 @@ export class DatalakeService implements StorageAdapter {
}
}
} catch (err) {
ctx.error('failed to stat object', { error: err, objectName, workspaceId })
ctx.error('failed to stat object', { error: err, objectName, workspace: wsIds.uuid })
}
}

@withContext('get')
async get (ctx: MeasureContext, workspaceId: WorkspaceDataId, objectName: string): Promise<Readable> {
return await this.client.getObject(ctx, workspaceId, objectName)
async get (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Readable> {
return await this.client.getObject(ctx, wsIds.uuid, objectName)
}

@withContext('put')
async put (
ctx: MeasureContext,
workspaceId: WorkspaceDataId,
wsIds: WorkspaceIds,
objectName: string,
stream: Readable | Buffer | string,
contentType: string,
@ -176,7 +176,7 @@ export class DatalakeService implements StorageAdapter {
}

const { etag } = await ctx.with('put', {}, (ctx) =>
withRetry(ctx, 5, () => this.client.putObject(ctx, workspaceId, objectName, stream, params))
withRetry(ctx, 5, () => this.client.putObject(ctx, wsIds.uuid, objectName, stream, params))
)

return {
@ -186,8 +186,8 @@ export class DatalakeService implements StorageAdapter {
}

@withContext('read')
async read (ctx: MeasureContext, workspaceId: WorkspaceDataId, objectName: string): Promise<Buffer[]> {
const data = await this.client.getObject(ctx, workspaceId, objectName)
async read (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Buffer[]> {
const data = await this.client.getObject(ctx, wsIds.uuid, objectName)
const chunks: Buffer[] = []

for await (const chunk of data) {
@ -200,16 +200,16 @@ export class DatalakeService implements StorageAdapter {
@withContext('partial')
async partial (
ctx: MeasureContext,
workspaceId: WorkspaceDataId,
wsIds: WorkspaceIds,
objectName: string,
offset: number,
length?: number
): Promise<Readable> {
return await this.client.getPartialObject(ctx, workspaceId, objectName, offset, length)
return await this.client.getPartialObject(ctx, wsIds.uuid, objectName, offset, length)
}

async getUrl (ctx: MeasureContext, workspaceId: WorkspaceDataId, objectName: string): Promise<string> {
return this.client.getObjectUrl(ctx, workspaceId, objectName)
async getUrl (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<string> {
return this.client.getObjectUrl(ctx, wsIds.uuid, objectName)
}
}
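
Seen from the adapter interface, the whole migration reduces to one parameter swap applied uniformly across every method; an abridged sketch (only stat shown, the other methods follow the same pattern):

// Before: each method addressed storage by a bare data id.
//   stat (ctx: MeasureContext, workspaceId: WorkspaceDataId, objectName: string): Promise<Blob | undefined>
// After: each method receives the full bundle and picks out what it needs.
//   stat (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Blob | undefined>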

|
@ -1,4 +1,4 @@
import { MeasureMetricsContext, type WorkspaceDataId, generateId } from '@hcengineering/core'
import { MeasureMetricsContext, type WorkspaceDataId, type WorkspaceUuid, generateId } from '@hcengineering/core'
import type { StorageConfiguration } from '@hcengineering/server-core'
import { DatalakeService, processConfigFromEnv, type DatalakeConfig } from '.'

@ -17,8 +17,12 @@ const storageService = new DatalakeService({ ...(config.storages[0] as DatalakeC
async function doTest (): Promise<void> {
const genWorkspaceId1 = generateId() as unknown as WorkspaceDataId

const ws1 = genWorkspaceId1
await storageService.make(toolCtx, ws1)
const wsIds1 = {
uuid: genWorkspaceId1 as unknown as WorkspaceUuid,
dataId: genWorkspaceId1,
url: ''
}
await storageService.make(toolCtx, wsIds1)

/// /////// Uploads
console.log('upload 1mb test')
@ -28,7 +32,7 @@ async function doTest (): Promise<void> {
for (let i = 0; i < 10; i++) {
// We need 1Mb random file to check upload speed.
const st = Date.now()
await storageService.put(toolCtx, ws1, `testObject.${i}`, stream, 'application/octet-stream', stream.length)
await storageService.put(toolCtx, wsIds1, `testObject.${i}`, stream, 'application/octet-stream', stream.length)
console.log('upload time', Date.now() - st)
}
let now = Date.now()
@ -40,7 +44,7 @@ async function doTest (): Promise<void> {
for (let i = 0; i < 10; i++) {
// We need 1Mb random file to check upload speed.
const st = Date.now()
await storageService.read(toolCtx, ws1, `testObject.${i}`)
await storageService.read(toolCtx, wsIds1, `testObject.${i}`)
console.log('download time', Date.now() - st)
}

@ -52,7 +56,7 @@ async function doTest (): Promise<void> {
for (let i = 0; i < 10; i++) {
// We need 1Mb random file to check upload speed.
const st = Date.now()
const readable = await storageService.get(toolCtx, ws1, `testObject.${i}`)
const readable = await storageService.get(toolCtx, wsIds1, `testObject.${i}`)
const chunks: Buffer[] = []
readable.on('data', (chunk) => {
chunks.push(chunk)
@ -76,7 +80,7 @@ async function doTest (): Promise<void> {
// We need 1Mb random file to check upload speed.
const st = Date.now()
for (let i = 0; i < sz; i++) {
const readable = await storageService.partial(toolCtx, ws1, `testObject.${i}`, i * MB, MB)
const readable = await storageService.partial(toolCtx, wsIds1, `testObject.${i}`, i * MB, MB)
const chunks: Buffer[] = []
readable.on('data', (chunk) => {
chunks.push(chunk)
|
@ -15,7 +15,14 @@
//

import { Analytics } from '@hcengineering/analytics'
import { MeasureContext, Blob as PlatformBlob, WorkspaceDataId, metricsAggregate, type Ref } from '@hcengineering/core'
import {
MeasureContext,
Blob as PlatformBlob,
WorkspaceDataId,
WorkspaceIds,
metricsAggregate,
type Ref
} from '@hcengineering/core'
import { decodeToken } from '@hcengineering/server-token'
import { StorageAdapter } from '@hcengineering/storage'
import bp from 'body-parser'
@ -42,15 +49,15 @@ const cacheControlNoCache = 'public, no-store, no-cache, must-revalidate, max-ag
async function storageUpload (
ctx: MeasureContext,
storageAdapter: StorageAdapter,
workspace: WorkspaceDataId,
wsIds: WorkspaceIds,
file: UploadedFile
): Promise<string> {
const uuid = file.name
const data = file.tempFilePath !== undefined ? fs.createReadStream(file.tempFilePath) : file.data
const resp = await ctx.with(
'storage upload',
{ workspace },
(ctx) => storageAdapter.put(ctx, workspace, uuid, data, file.mimetype, file.size),
{ workspace: wsIds.uuid },
(ctx) => storageAdapter.put(ctx, wsIds, uuid, data, file.mimetype, file.size),
{ file: file.name, contentType: file.mimetype }
)

@ -81,7 +88,7 @@ async function getFileRange (
stat: PlatformBlob,
range: string,
client: StorageAdapter,
workspace: WorkspaceDataId,
wsIds: WorkspaceIds,
res: Response
): Promise<void> {
const uuid = stat._id
@ -105,7 +112,7 @@ async function getFileRange (
const dataStream = await ctx.with(
'partial',
{},
(ctx) => client.partial(ctx, workspace, stat._id, start, end - start + 1),
(ctx) => client.partial(ctx, wsIds, stat._id, start, end - start + 1),
{}
)
res.writeHead(206, {
@ -129,7 +136,7 @@ async function getFileRange (
resolve()
})
dataStream.on('error', (err) => {
ctx.error('error receive stream', { workspace, uuid, error: err })
ctx.error('error receive stream', { workspace: wsIds.uuid, uuid, error: err })
Analytics.handleError(err)

res.end()
@ -144,7 +151,7 @@ async function getFileRange (
err?.message === 'No such key' ||
err?.Code === 'NoSuchKey'
) {
ctx.info('No such key', { workspace, uuid })
ctx.info('No such key', { workspace: wsIds.uuid, uuid })
res.status(404).send()
return
} else {
@ -162,7 +169,7 @@ async function getFile (
ctx: MeasureContext,
stat: PlatformBlob,
client: StorageAdapter,
workspace: WorkspaceDataId,
wsIds: WorkspaceIds,
req: Request,
res: Response
): Promise<void> {
@ -204,7 +211,7 @@ async function getFile (
{ contentType: stat.contentType },
async (ctx) => {
try {
const dataStream = await ctx.with('readable', {}, (ctx) => client.get(ctx, workspace, stat._id))
const dataStream = await ctx.with('readable', {}, (ctx) => client.get(ctx, wsIds, stat._id))
res.writeHead(200, {
'Content-Type': stat.contentType,
'Content-Security-Policy': "default-src 'none';",
@ -233,7 +240,7 @@ async function getFile (
})
})
} catch (err: any) {
ctx.error('get-file-error', { workspace, err })
ctx.error('get-file-error', { workspace: wsIds.uuid, err })
Analytics.handleError(err)
res.status(500).send()
}
@ -411,16 +418,17 @@ export function start (
})
)

const getWorkspaceDataId = async (
ctx: MeasureContext,
token: string,
path?: string
): Promise<WorkspaceDataId | null> => {
const getWorkspaceIds = async (ctx: MeasureContext, token: string, path?: string): Promise<WorkspaceIds | null> => {
const accountClient = getAccountClient(config.accountsUrlInternal ?? config.accountsUrl, token)
const workspaceInfo = await accountClient.getWorkspaceInfo()
const wsIds = {
uuid: workspaceInfo.uuid,
dataId: workspaceInfo.dataId,
url: workspaceInfo.url
}
const actualDataId = workspaceInfo.dataId ?? (workspaceInfo.uuid as unknown as WorkspaceDataId)
if (path === undefined) {
return actualDataId
return wsIds
}

const expectedDataId = path.split('/')[2]
@ -435,7 +443,7 @@ export function start (
return null
}

return actualDataId
return wsIds
}

const filesHandler = async (req: Request<any>, res: Response<any>): Promise<void> => {
@ -450,8 +458,8 @@ export function start (
cookies.find((it) => it[0] === 'presentation-metadata-Token')?.[1] ??
(req.query.token as string | undefined) ??
''
const workspaceDataId = await getWorkspaceDataId(ctx, token, req.path)
if (workspaceDataId === null) {
const wsIds = await getWorkspaceIds(ctx, token, req.path)
if (wsIds === null) {
res.status(403).send()
return
}
@ -462,12 +470,12 @@ export function start (
return
}

let blobInfo = await ctx.with('stat', { workspace: workspaceDataId }, (ctx) =>
config.storageAdapter.stat(ctx, workspaceDataId, uuid)
let blobInfo = await ctx.with('stat', { workspace: wsIds.uuid }, (ctx) =>
config.storageAdapter.stat(ctx, wsIds, uuid)
)

if (blobInfo === undefined) {
ctx.error('No such key', { file: uuid, workspace: workspaceDataId })
ctx.error('No such key', { file: uuid, workspace: wsIds.uuid })
res.status(404).send()
return
}
@ -494,7 +502,7 @@ export function start (
const accept = req.headers.accept
if (accept !== undefined && isImage && blobInfo.contentType !== 'image/gif' && size !== undefined) {
blobInfo = await ctx.with('resize', {}, (ctx) =>
getGeneratePreview(ctx, blobInfo as PlatformBlob, size, uuid, config, workspaceDataId, accept, () =>
getGeneratePreview(ctx, blobInfo as PlatformBlob, size, uuid, config, wsIds, accept, () =>
join(tempFileDir, `${++temoFileIndex}`)
)
)
@ -502,14 +510,14 @@ export function start (

const range = req.headers.range
if (range !== undefined) {
await ctx.with('file-range', { workspace: workspaceDataId }, (ctx) =>
getFileRange(ctx, blobInfo as PlatformBlob, range, config.storageAdapter, workspaceDataId, res)
await ctx.with('file-range', { workspace: wsIds.uuid }, (ctx) =>
getFileRange(ctx, blobInfo as PlatformBlob, range, config.storageAdapter, wsIds, res)
)
} else {
await ctx.with(
'file',
{ workspace: workspaceDataId },
(ctx) => getFile(ctx, blobInfo as PlatformBlob, config.storageAdapter, workspaceDataId, req, res),
{ workspace: wsIds.uuid },
(ctx) => getFile(ctx, blobInfo as PlatformBlob, config.storageAdapter, wsIds, req, res),
{ uuid }
)
}
@ -575,7 +583,7 @@ export function start (

try {
const token = authHeader.split(' ')[1]
const workspaceDataId = await getWorkspaceDataId(ctx, token, req.path)
const workspaceDataId = await getWorkspaceIds(ctx, token, req.path)
if (workspaceDataId === null) {
res.status(403).send()
return
@ -606,7 +614,7 @@ export function start (
}

const token = authHeader.split(' ')[1]
const workspaceDataId = await getWorkspaceDataId(ctx, token, req.path)
const workspaceDataId = await getWorkspaceIds(ctx, token, req.path)
if (workspaceDataId === null) {
res.status(403).send()
return
@ -636,7 +644,7 @@ export function start (
return
}
const token = authHeader.split(' ')[1]
const workspaceDataId = await getWorkspaceDataId(ctx, token)
const workspaceDataId = await getWorkspaceIds(ctx, token)
if (workspaceDataId === null) {
res.status(403).send()
return
@ -720,7 +728,7 @@ export function start (
return
}
const token = authHeader.split(' ')[1]
const workspaceDataId = await getWorkspaceDataId(ctx, token)
const workspaceDataId = await getWorkspaceIds(ctx, token)
if (workspaceDataId === null) {
res.status(403).send()
return
@ -858,7 +866,7 @@ async function getGeneratePreview (
size: number | undefined,
uuid: string,
config: { storageAdapter: StorageAdapter },
wsDataId: WorkspaceDataId,
wsIds: WorkspaceIds,
accept: string,
tempFile: () => string
): Promise<PlatformBlob> {
@ -893,7 +901,7 @@ async function getGeneratePreview (

const sizeId = uuid + `%preview%${size}${format !== 'jpeg' ? format : ''}`

const d = await config.storageAdapter.stat(ctx, wsDataId, sizeId)
const d = await config.storageAdapter.stat(ctx, wsIds, sizeId)
const hasSmall = d !== undefined && d.size > 0

if (hasSmall) {
@ -906,7 +914,7 @@ async function getGeneratePreview (
// Let's get data and resize it
const fname = tempFile()
files.push(fname)
await writeFile(fname, await config.storageAdapter.get(ctx, wsDataId, uuid))
await writeFile(fname, await config.storageAdapter.get(ctx, wsIds, uuid))

pipeline = sharp(fname)
const md = await pipeline.metadata()
@ -966,7 +974,7 @@ async function getGeneratePreview (
// Add support of avif as well.
const upload = await config.storageAdapter.put(
ctx,
wsDataId,
wsIds,
sizeId,
createReadStream(outFile),
contentType,
|
@ -41,7 +41,6 @@ import core, {
type WithLookup,
coreId,
type WorkspaceIds,
type WorkspaceDataId,
docKey,
generateId,
getFullTextIndexableAttributes,
@ -134,10 +133,6 @@ export class FullTextIndexPipeline implements FullTextPipeline {
this.contexts = new Map(model.findAllSync(core.class.FullTextSearchContext, {}).map((it) => [it.toClass, it]))
}

get workspaceDataId (): WorkspaceDataId {
return this.workspace.dataId ?? (this.workspace.uuid as unknown as WorkspaceDataId)
}

async cancel (): Promise<void> {
this.cancelling = true
await this.verify
@ -811,7 +806,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
const value = v.value as Ref<Blob>
if (value !== undefined && value !== '') {
try {
const readable = await this.storageAdapter?.read(ctx, this.workspaceDataId, value)
const readable = await this.storageAdapter?.read(ctx, this.workspace, value)
const markup = Buffer.concat(readable as any).toString()
let textContent = markupToText(markup)
textContent = textContent
@ -859,7 +854,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
return
}
}
const docInfo: Blob | undefined = await this.storageAdapter.stat(ctx, this.workspaceDataId, ref)
const docInfo: Blob | undefined = await this.storageAdapter.stat(ctx, this.workspace, ref)
if (docInfo !== undefined && docInfo.size < 30 * 1024 * 1024) {
// We have blob, we need to decode it to string.
const contentType = (docInfo.contentType ?? '').split(';')[0]
@ -884,7 +879,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
private async handleBlob (ctx: MeasureContext<any>, docInfo: Blob | undefined, indexedDoc: IndexedDoc): Promise<void> {
if (docInfo !== undefined) {
const contentType = (docInfo.contentType ?? '').split(';')[0]
const readable = await this.storageAdapter?.get(ctx, this.workspaceDataId, docInfo._id)
const readable = await this.storageAdapter?.get(ctx, this.workspace, docInfo._id)

if (readable !== undefined) {
try {
@ -913,7 +908,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
): Promise<void> {
if (docInfo !== undefined) {
let textContent = Buffer.concat(
(await this.storageAdapter?.read(ctx, this.workspaceDataId, docInfo._id)) as any
(await this.storageAdapter?.read(ctx, this.workspace, docInfo._id)) as any
).toString()

textContent = textContent
|
@ -13,7 +13,7 @@
// limitations under the License.
//

import { MeasureMetricsContext, type WorkspaceDataId, generateId } from '@hcengineering/core'
import { MeasureMetricsContext, type WorkspaceDataId, type WorkspaceUuid, generateId } from '@hcengineering/core'
import { objectsToArray, type StorageConfiguration } from '@hcengineering/server-core'
import { MinioService, processConfigFromEnv, type MinioConfig } from '..'

@ -41,28 +41,36 @@ describe('minio operations', () => {

expect(genWorkspaceId1).not.toEqual(genWorkspaceId2)

const ws1 = genWorkspaceId1
const ws2 = genWorkspaceId2
await minioService.make(toolCtx, ws1)
await minioService.make(toolCtx, ws2)
const wsIds1 = {
uuid: genWorkspaceId1 as unknown as WorkspaceUuid,
dataId: genWorkspaceId1,
url: ''
}
const wsIds2 = {
uuid: genWorkspaceId2 as unknown as WorkspaceUuid,
dataId: genWorkspaceId2,
url: ''
}
await minioService.make(toolCtx, wsIds1)
await minioService.make(toolCtx, wsIds2)

const v1 = generateId()
await minioService.put(toolCtx, ws1, 'obj1.txt', v1, 'text/plain')
await minioService.put(toolCtx, ws2, 'obj2.txt', v1, 'text/plain')
await minioService.put(toolCtx, wsIds1, 'obj1.txt', v1, 'text/plain')
await minioService.put(toolCtx, wsIds2, 'obj2.txt', v1, 'text/plain')

const w1Objects = await objectsToArray(toolCtx, minioService, ws1)
const w1Objects = await objectsToArray(toolCtx, minioService, wsIds1)
expect(w1Objects.map((it) => it._id)).toEqual(['obj1.txt'])

const w2Objects = await objectsToArray(toolCtx, minioService, ws2)
const w2Objects = await objectsToArray(toolCtx, minioService, wsIds2)
expect(w2Objects.map((it) => it._id)).toEqual(['obj2.txt'])

await minioService.put(toolCtx, ws1, 'obj1.txt', 'obj1', 'text/plain')
await minioService.put(toolCtx, ws1, 'obj2.txt', 'obj2', 'text/plain')
await minioService.put(toolCtx, wsIds1, 'obj1.txt', 'obj1', 'text/plain')
await minioService.put(toolCtx, wsIds1, 'obj2.txt', 'obj2', 'text/plain')

const w1Objects2 = await objectsToArray(toolCtx, minioService, ws1)
const w1Objects2 = await objectsToArray(toolCtx, minioService, wsIds1)
expect(w1Objects2.map((it) => it._id)).toEqual(['obj1.txt', 'obj2.txt'])

const data = Buffer.concat(await minioService.read(toolCtx, ws1, 'obj1.txt'))
const data = Buffer.concat(await minioService.read(toolCtx, wsIds1, 'obj1.txt'))

expect('obj1').toEqual(data.toString())

|
@ -15,10 +15,19 @@

import { Client, type BucketItem, type BucketStream } from 'minio'

import core, { withContext, type Blob, type MeasureContext, type Ref, type WorkspaceDataId } from '@hcengineering/core'
import core, {
withContext,
type WorkspaceIds,
type WorkspaceDataId,
type Blob,
type MeasureContext,
type Ref,
type WorkspaceUuid
} from '@hcengineering/core'
import { getMetadata } from '@hcengineering/platform'
import serverCore, {
removeAllObjects,
getDataId,
type BlobStorageIterator,
type BucketInfo,
type ListBlobResult,
@ -61,28 +70,28 @@ export class MinioService implements StorageAdapter {
})
}

async initialize (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {}
async initialize (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {}

/**
* @public
*/
getBucketId (dataId: WorkspaceDataId): string {
return this.opt.rootBucket ?? (this.opt.bucketPrefix ?? '') + dataId
getBucketId (wsIds: WorkspaceIds): string {
return this.opt.rootBucket ?? (this.opt.bucketPrefix ?? '') + getDataId(wsIds)
}

getBucketFolder (dataId: WorkspaceDataId): string {
return dataId
getBucketFolder (wsIds: WorkspaceIds): string {
return getDataId(wsIds)
}
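
Both the Minio and S3 adapters now derive bucket names through getDataId from @hcengineering/server-core. Judging from the fallback expressions this commit removes elsewhere, it presumably reduces to the sketch below (an assumption about the helper, not its verbatim source):

// Assumed shape of the helper: prefer the legacy dataId, fall back to the uuid.
function getDataId (wsIds: WorkspaceIds): WorkspaceDataId {
  return wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)
}

Read this way, existing buckets stay addressable under their historical data ids, while workspaces without a dataId get uuid-named buckets.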

async close (): Promise<void> {}
async exists (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<boolean> {
return await this.client.bucketExists(this.getBucketId(dataId))
async exists (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<boolean> {
return await this.client.bucketExists(this.getBucketId(wsIds))
}

@withContext('make')
async make (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {
async make (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {
try {
await this.client.makeBucket(this.getBucketId(dataId), this.opt.region ?? 'us-east-1')
await this.client.makeBucket(this.getBucketId(wsIds), this.opt.region ?? 'us-east-1')
} catch (err: any) {
if (err.code === 'BucketAlreadyOwnedByYou') {
return
@ -111,12 +120,17 @@ export class MinioService implements StorageAdapter {
stream.on('data', (data) => {
const wsDataId = data.prefix?.split('/')?.[0] as WorkspaceDataId
if (wsDataId !== undefined && !info.has(wsDataId)) {
const wsIds = {
uuid: wsDataId as unknown as WorkspaceUuid,
dataId: wsDataId,
url: ''
}
info.set(wsDataId, {
name: wsDataId,
delete: async () => {
await this.delete(ctx, wsDataId)
await this.delete(ctx, wsIds)
},
list: async () => await this.listStream(ctx, wsDataId)
list: async () => await this.listStream(ctx, wsIds)
})
}
})
@ -124,44 +138,49 @@ export class MinioService implements StorageAdapter {
stream.destroy()
return Array.from(info.values())
} else {
const productPostfix = this.getBucketFolder('' as WorkspaceDataId)
const productPostfix = this.getBucketFolder({ uuid: '' as WorkspaceUuid, dataId: '' as WorkspaceDataId, url: '' })
const buckets = await this.client.listBuckets()
return buckets
.filter((it) => it.name.endsWith(productPostfix))
.map((it) => {
let name = it.name as WorkspaceDataId
name = name.slice(0, name.length - productPostfix.length) as WorkspaceDataId
const wsIds = {
uuid: name as unknown as WorkspaceUuid,
dataId: name,
url: ''
}
return {
name,
delete: async () => {
await this.delete(ctx, name)
await this.delete(ctx, wsIds)
},
list: async () => await this.listStream(ctx, name)
list: async () => await this.listStream(ctx, wsIds)
}
})
}
}

getDocumentKey (workspace: WorkspaceDataId, name: string): string {
return this.opt.rootBucket === undefined ? name : `${this.getBucketFolder(workspace)}/${name}`
getDocumentKey (wsIds: WorkspaceIds, name: string): string {
return this.opt.rootBucket === undefined ? name : `${this.getBucketFolder(wsIds)}/${name}`
}

@withContext('remove')
async remove (ctx: MeasureContext, dataId: WorkspaceDataId, objectNames: string[]): Promise<void> {
const toRemove = objectNames.map((it) => this.getDocumentKey(dataId, it))
await this.client.removeObjects(this.getBucketId(dataId), toRemove)
async remove (ctx: MeasureContext, wsIds: WorkspaceIds, objectNames: string[]): Promise<void> {
const toRemove = objectNames.map((it) => this.getDocumentKey(wsIds, it))
await this.client.removeObjects(this.getBucketId(wsIds), toRemove)
}

@withContext('delete')
async delete (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {
async delete (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {
try {
await removeAllObjects(ctx, this, dataId)
await removeAllObjects(ctx, this, wsIds)
} catch (err: any) {
ctx.error('failed to clean all objects', { error: err })
}
if (this.opt.rootBucket === undefined) {
// Also delete a bucket
await this.client.removeBucket(this.getBucketId(dataId))
await this.client.removeBucket(this.getBucketId(wsIds))
}
}

@ -172,12 +191,12 @@ export class MinioService implements StorageAdapter {
return key
}

rootPrefix (dataId: WorkspaceDataId): string | undefined {
return this.opt.rootBucket !== undefined ? this.getBucketFolder(dataId) + '/' : undefined
rootPrefix (wsIds: WorkspaceIds): string | undefined {
return this.opt.rootBucket !== undefined ? this.getBucketFolder(wsIds) + '/' : undefined
}

@withContext('listStream')
async listStream (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<BlobStorageIterator> {
async listStream (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<BlobStorageIterator> {
let hasMore = true
let stream: BucketStream<BucketItem> | undefined
let done = false
@ -185,13 +204,13 @@ export class MinioService implements StorageAdapter {
let onNext: () => void = () => {}
const buffer: ListBlobResult[] = []

const rootPrefix = this.rootPrefix(dataId)
const rootPrefix = this.rootPrefix(wsIds)
return {
next: async (): Promise<ListBlobResult[]> => {
try {
if (stream === undefined && !done) {
const rprefix = rootPrefix ?? ''
stream = this.client.listObjects(this.getBucketId(dataId), rprefix, true)
stream = this.client.listObjects(this.getBucketId(wsIds), rprefix, true)
stream.on('end', () => {
stream?.destroy()
done = true
@ -259,10 +278,10 @@ export class MinioService implements StorageAdapter {
}

@withContext('stat')
async stat (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Blob | undefined> {
async stat (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Blob | undefined> {
try {
const result = await this.client.statObject(this.getBucketId(dataId), this.getDocumentKey(dataId, objectName))
const rootPrefix = this.rootPrefix(dataId)
const result = await this.client.statObject(this.getBucketId(wsIds), this.getDocumentKey(wsIds, objectName))
const rootPrefix = this.rootPrefix(wsIds)
return {
provider: '',
_class: core.class.Blob,
@ -286,38 +305,32 @@ export class MinioService implements StorageAdapter {
// Do not print error in this case
return
}
ctx.error('no object found', { error: err, objectName, dataId })
ctx.error('no object found', { error: err, objectName, wsIds })
}
}

@withContext('get')
async get (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Readable> {
return await this.client.getObject(this.getBucketId(dataId), this.getDocumentKey(dataId, objectName))
async get (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Readable> {
return await this.client.getObject(this.getBucketId(wsIds), this.getDocumentKey(wsIds, objectName))
}

@withContext('put')
async put (
ctx: MeasureContext,
dataId: WorkspaceDataId,
wsIds: WorkspaceIds,
objectName: string,
stream: Readable | Buffer | string,
contentType: string,
size?: number
): Promise<UploadedObjectInfo> {
return await this.client.putObject(
this.getBucketId(dataId),
this.getDocumentKey(dataId, objectName),
stream,
size,
{
'Content-Type': contentType
}
)
return await this.client.putObject(this.getBucketId(wsIds), this.getDocumentKey(wsIds, objectName), stream, size, {
'Content-Type': contentType
})
}

@withContext('read')
async read (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Buffer[]> {
const data = await this.client.getObject(this.getBucketId(dataId), this.getDocumentKey(dataId, objectName))
async read (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Buffer[]> {
const data = await this.client.getObject(this.getBucketId(wsIds), this.getDocumentKey(wsIds, objectName))
const chunks: Buffer[] = []

await new Promise((resolve, reject) => {
@ -343,23 +356,23 @@ export class MinioService implements StorageAdapter {
@withContext('partial')
async partial (
ctx: MeasureContext,
dataId: WorkspaceDataId,
wsIds: WorkspaceIds,
objectName: string,
offset: number,
length?: number
): Promise<Readable> {
return await this.client.getPartialObject(
this.getBucketId(dataId),
this.getDocumentKey(dataId, objectName),
this.getBucketId(wsIds),
this.getDocumentKey(wsIds, objectName),
offset,
length
)
}

@withContext('getUrl')
async getUrl (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<string> {
async getUrl (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<string> {
const filesUrl = getMetadata(serverCore.metadata.FilesUrl) ?? ''
return filesUrl.replaceAll(':workspace', dataId).replaceAll(':blobId', objectName)
return filesUrl.replaceAll(':workspace', getDataId(wsIds)).replaceAll(':blobId', objectName)
}
}

|
@ -13,7 +13,7 @@
// limitations under the License.
//

import { MeasureMetricsContext, type WorkspaceDataId, generateId } from '@hcengineering/core'
import { MeasureMetricsContext, type WorkspaceDataId, type WorkspaceUuid, generateId } from '@hcengineering/core'
import { objectsToArray, type StorageConfiguration } from '@hcengineering/server-core'
import { S3Service, processConfigFromEnv, type S3Config } from '..'

@ -36,33 +36,41 @@ describe('s3 operations', () => {
await b.delete()
}

const genWorkspaceId1 = generateId()
const genWorkspaceId2 = generateId()
const genWorkspaceId1 = generateId() as unknown as WorkspaceDataId
const genWorkspaceId2 = generateId() as unknown as WorkspaceDataId

expect(genWorkspaceId1).not.toEqual(genWorkspaceId2)

const ws1 = genWorkspaceId1 as unknown as WorkspaceDataId
const ws2 = genWorkspaceId2 as unknown as WorkspaceDataId
await minioService.make(toolCtx, ws1)
await minioService.make(toolCtx, ws2)
const wsIds1 = {
uuid: genWorkspaceId1 as unknown as WorkspaceUuid,
dataId: genWorkspaceId1,
url: ''
}
const wsIds2 = {
uuid: genWorkspaceId2 as unknown as WorkspaceUuid,
dataId: genWorkspaceId2,
url: ''
}
await minioService.make(toolCtx, wsIds1)
await minioService.make(toolCtx, wsIds2)

const v1 = generateId()
await minioService.put(toolCtx, ws1, 'obj1.txt', v1, 'text/plain')
await minioService.put(toolCtx, ws2, 'obj2.txt', v1, 'text/plain')
await minioService.put(toolCtx, wsIds1, 'obj1.txt', v1, 'text/plain')
await minioService.put(toolCtx, wsIds2, 'obj2.txt', v1, 'text/plain')

const w1Objects = await objectsToArray(toolCtx, minioService, ws1)
const w1Objects = await objectsToArray(toolCtx, minioService, wsIds1)
expect(w1Objects.map((it) => it._id)).toEqual(['obj1.txt'])

const w2Objects = await objectsToArray(toolCtx, minioService, ws2)
const w2Objects = await objectsToArray(toolCtx, minioService, wsIds2)
expect(w2Objects.map((it) => it._id)).toEqual(['obj2.txt'])

await minioService.put(toolCtx, ws1, 'obj1.txt', 'obj1', 'text/plain')
await minioService.put(toolCtx, ws1, 'obj2.txt', 'obj2', 'text/plain')
await minioService.put(toolCtx, wsIds1, 'obj1.txt', 'obj1', 'text/plain')
await minioService.put(toolCtx, wsIds1, 'obj2.txt', 'obj2', 'text/plain')

const w1Objects2 = await objectsToArray(toolCtx, minioService, ws1)
const w1Objects2 = await objectsToArray(toolCtx, minioService, wsIds1)
expect(w1Objects2.map((it) => it._id)).toEqual(['obj1.txt', 'obj2.txt'])

const read = (await minioService.read(toolCtx, ws1, 'obj1.txt')) as unknown as Uint8Array[]
const read = (await minioService.read(toolCtx, wsIds1, 'obj1.txt')) as unknown as Uint8Array[]
const data = Buffer.concat(read)

expect('obj1').toEqual(data.toString())
|
@ -19,10 +19,19 @@ import { NodeHttpHandler } from '@smithy/node-http-handler'
import { Agent as HttpAgent } from 'http'
import { Agent as HttpsAgent } from 'https'

import core, { withContext, type Blob, type MeasureContext, type Ref, type WorkspaceDataId } from '@hcengineering/core'
import core, {
withContext,
type WorkspaceIds,
type Blob,
type MeasureContext,
type Ref,
type WorkspaceDataId,
type WorkspaceUuid
} from '@hcengineering/core'
import { getMetadata } from '@hcengineering/platform'
import serverCore, {
NoSuchKeyError,
getDataId,
type BlobStorageIterator,
type ListBlobResult,
type StorageAdapter,
@ -80,25 +89,25 @@ export class S3Service implements StorageAdapter {
this.expireTime = parseInt(this.opt.expireTime ?? '168') * 3600 // use 7 * 24 - hours as default value for expireF
}

async initialize (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {}
async initialize (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {}

/**
* @public
*/
getBucketId (dataId: WorkspaceDataId): string {
return this.opt.rootBucket ?? (this.opt.bucketPrefix ?? '') + dataId
getBucketId (wsIds: WorkspaceIds): string {
return this.opt.rootBucket ?? (this.opt.bucketPrefix ?? '') + getDataId(wsIds)
}

getBucketFolder (dataId: WorkspaceDataId): string {
return dataId
getBucketFolder (wsIds: WorkspaceIds): string {
return getDataId(wsIds)
}

async close (): Promise<void> {}

async exists (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<boolean> {
async exists (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<boolean> {
try {
const result = await this.client.headBucket({
Bucket: this.getBucketId(dataId)
Bucket: this.getBucketId(wsIds)
})
return result.$metadata.httpStatusCode === 200
} catch (err: any) {
@ -112,10 +121,10 @@ export class S3Service implements StorageAdapter {
}

@withContext('make')
async make (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {
async make (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {
try {
await this.client.createBucket({
Bucket: this.getBucketId(dataId)
Bucket: this.getBucketId(wsIds)
})
} catch (err: any) {
if (err.Code === 'BucketAlreadyOwnedByYou') {
@ -140,13 +149,18 @@ export class S3Service implements StorageAdapter {
})
for (const data of res.CommonPrefixes ?? []) {
const wsDataId = data.Prefix?.split('/')?.[0] as WorkspaceDataId
const wsIds = {
uuid: wsDataId as unknown as WorkspaceUuid,
dataId: wsDataId,
url: ''
}
if (wsDataId !== undefined && !info.has(wsDataId)) {
info.set(wsDataId, {
name: wsDataId,
delete: async () => {
await this.delete(ctx, wsDataId)
await this.delete(ctx, wsIds)
},
list: async () => await this.listStream(ctx, wsDataId)
list: async () => await this.listStream(ctx, wsIds)
})
}
}
@ -158,19 +172,28 @@ export class S3Service implements StorageAdapter {
}
return Array.from(info.values())
} else {
const productPostfix = this.getBucketFolder('' as WorkspaceDataId)
const productPostfix = this.getBucketFolder({
uuid: '' as WorkspaceUuid,
dataId: '' as WorkspaceDataId,
url: ''
})
const buckets = await this.client.listBuckets()
return (buckets.Buckets ?? [])
.filter((it) => it.Name !== undefined && it.Name.endsWith(productPostfix))
.map((it) => {
let name = (it.Name ?? '') as WorkspaceDataId
name = name.slice(0, name.length - productPostfix.length) as WorkspaceDataId
const wsIds = {
uuid: name as unknown as WorkspaceUuid,
dataId: name,
url: ''
}
return {
name,
delete: async () => {
await this.delete(ctx, name)
await this.delete(ctx, wsIds)
},
list: async () => await this.listStream(ctx, name)
list: async () => await this.listStream(ctx, wsIds)
}
})
}
@ -184,31 +207,31 @@ export class S3Service implements StorageAdapter {
}
}

getDocumentKey (workspace: WorkspaceDataId, name: string): string {
return this.opt.rootBucket === undefined ? name : `${this.getBucketFolder(workspace)}/${name}`
getDocumentKey (wsIds: WorkspaceIds, name: string): string {
return this.opt.rootBucket === undefined ? name : `${this.getBucketFolder(wsIds)}/${name}`
}

@withContext('remove')
async remove (ctx: MeasureContext, dataId: WorkspaceDataId, objectNames: string[]): Promise<void> {
async remove (ctx: MeasureContext, wsIds: WorkspaceIds, objectNames: string[]): Promise<void> {
await this.client.deleteObjects({
Bucket: this.getBucketId(dataId),
Bucket: this.getBucketId(wsIds),
Delete: {
Objects: objectNames.map((it) => ({ Key: this.getDocumentKey(dataId, it) }))
Objects: objectNames.map((it) => ({ Key: this.getDocumentKey(wsIds, it) }))
}
})
}

@withContext('delete')
async delete (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {
async delete (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {
try {
await removeAllObjects(ctx, this, dataId)
await removeAllObjects(ctx, this, wsIds)
} catch (err: any) {
ctx.error('failed t oclean all objecrs', { error: err })
|
||||
}
|
||||
if (this.opt.rootBucket === undefined) {
|
||||
// We should also delete bucket
|
||||
await this.client.deleteBucket({
|
||||
Bucket: this.getBucketId(dataId)
|
||||
Bucket: this.getBucketId(wsIds)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -220,11 +243,11 @@ export class S3Service implements StorageAdapter {
|
||||
return key
|
||||
}
|
||||
|
||||
rootPrefix (dataId: WorkspaceDataId): string | undefined {
|
||||
return this.opt.rootBucket !== undefined ? this.getBucketFolder(dataId) + '/' : undefined
|
||||
rootPrefix (wsIds: WorkspaceIds): string | undefined {
|
||||
return this.opt.rootBucket !== undefined ? this.getBucketFolder(wsIds) + '/' : undefined
|
||||
}
|
||||
|
||||
async copy (sourceId: WorkspaceDataId, targetId: WorkspaceDataId, objectName: string): Promise<void> {
|
||||
async copy (sourceId: WorkspaceIds, targetId: WorkspaceIds, objectName: string): Promise<void> {
|
||||
const copyOp = new CopyObjectCommand({
|
||||
Bucket: this.getBucketId(targetId),
|
||||
Key: this.getDocumentKey(targetId, objectName),
|
||||
@ -234,18 +257,18 @@ export class S3Service implements StorageAdapter {
|
||||
}
|
||||
|
||||
@withContext('listStream')
|
||||
async listStream (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<BlobStorageIterator> {
|
||||
async listStream (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<BlobStorageIterator> {
|
||||
let hasMore = true
|
||||
const buffer: ListBlobResult[] = []
|
||||
let token: string | undefined
|
||||
|
||||
const rootPrefix = this.rootPrefix(dataId)
|
||||
const rootPrefix = this.rootPrefix(wsIds)
|
||||
return {
|
||||
next: async (): Promise<ListBlobResult[]> => {
|
||||
try {
|
||||
while (hasMore && buffer.length < 50) {
|
||||
const res = await this.client.listObjectsV2({
|
||||
Bucket: this.getBucketId(dataId),
|
||||
Bucket: this.getBucketId(wsIds),
|
||||
Prefix: rootPrefix ?? '',
|
||||
ContinuationToken: token
|
||||
})
|
||||
@ -270,7 +293,7 @@ export class S3Service implements StorageAdapter {
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
ctx.error('Failed to get list', { error: err, dataId })
|
||||
ctx.error('Failed to get list', { error: err, wsIds })
|
||||
}
|
||||
return buffer.splice(0, 50)
|
||||
},
|
||||
@ -279,13 +302,13 @@ export class S3Service implements StorageAdapter {
|
||||
}
|
||||
|
||||
@withContext('stat')
|
||||
async stat (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Blob | undefined> {
|
||||
async stat (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Blob | undefined> {
|
||||
try {
|
||||
const result = await this.client.headObject({
|
||||
Bucket: this.getBucketId(dataId),
|
||||
Key: this.getDocumentKey(dataId, objectName)
|
||||
Bucket: this.getBucketId(wsIds),
|
||||
Key: this.getDocumentKey(wsIds, objectName)
|
||||
})
|
||||
const rootPrefix = this.rootPrefix(dataId)
|
||||
const rootPrefix = this.rootPrefix(wsIds)
|
||||
return {
|
||||
provider: '',
|
||||
_class: core.class.Blob,
|
||||
@ -300,21 +323,21 @@ export class S3Service implements StorageAdapter {
|
||||
}
|
||||
} catch (err: any) {
|
||||
if (err?.$metadata?.httpStatusCode !== 404) {
|
||||
ctx.warn('no object found', { error: err, objectName, dataId })
|
||||
ctx.warn('no object found', { error: err, objectName, wsIds })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@withContext('get')
|
||||
async get (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Readable> {
|
||||
return await this.doGet(ctx, dataId, objectName)
|
||||
async get (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Readable> {
|
||||
return await this.doGet(ctx, wsIds, objectName)
|
||||
}
|
||||
|
||||
async doGet (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string, range?: string): Promise<Readable> {
|
||||
async doGet (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string, range?: string): Promise<Readable> {
|
||||
try {
|
||||
const res = await this.client.getObject({
|
||||
Bucket: this.getBucketId(dataId),
|
||||
Key: this.getDocumentKey(dataId, objectName),
|
||||
Bucket: this.getBucketId(wsIds),
|
||||
Key: this.getDocumentKey(wsIds, objectName),
|
||||
Range: range
|
||||
})
|
||||
|
||||
@ -330,14 +353,14 @@ export class S3Service implements StorageAdapter {
|
||||
}
|
||||
} catch (err: any) {
|
||||
// In case of error return undefined
|
||||
throw new NoSuchKeyError(`${dataId} missing ${objectName}`, err)
|
||||
throw new NoSuchKeyError(`uuid=${wsIds.uuid} dataId=${wsIds.dataId} missing ${objectName}`, err)
|
||||
}
|
||||
}
|
||||
|
||||
@withContext('put')
|
||||
put (
|
||||
ctx: MeasureContext,
|
||||
dataId: WorkspaceDataId,
|
||||
wsIds: WorkspaceIds,
|
||||
objectName: string,
|
||||
stream: Readable | Buffer | string,
|
||||
contentType: string,
|
||||
@ -349,8 +372,8 @@ export class S3Service implements StorageAdapter {
|
||||
{},
|
||||
async () => {
|
||||
const cmd = new PutObjectCommand({
|
||||
Bucket: this.getBucketId(dataId),
|
||||
Key: this.getDocumentKey(dataId, objectName),
|
||||
Bucket: this.getBucketId(wsIds),
|
||||
Key: this.getDocumentKey(wsIds, objectName),
|
||||
ContentType: contentType,
|
||||
ContentLength: size,
|
||||
Body: stream
|
||||
@ -361,7 +384,7 @@ export class S3Service implements StorageAdapter {
|
||||
versionId: response.VersionId ?? null
|
||||
}
|
||||
},
|
||||
{ size, objectName, dataId }
|
||||
{ size, objectName, wsIds }
|
||||
)
|
||||
// Less 5Mb
|
||||
} else {
|
||||
@ -372,8 +395,8 @@ export class S3Service implements StorageAdapter {
|
||||
const uploadTask = new Upload({
|
||||
client: this.client,
|
||||
params: {
|
||||
Bucket: this.getBucketId(dataId),
|
||||
Key: this.getDocumentKey(dataId, objectName),
|
||||
Bucket: this.getBucketId(wsIds),
|
||||
Key: this.getDocumentKey(wsIds, objectName),
|
||||
ContentType: contentType,
|
||||
Body: stream
|
||||
},
|
||||
@ -392,14 +415,14 @@ export class S3Service implements StorageAdapter {
|
||||
versionId: output.VersionId ?? null
|
||||
}
|
||||
},
|
||||
{ size, objectName, dataId }
|
||||
{ size, objectName, wsIds }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@withContext('read')
|
||||
async read (ctx: MeasureContext, dataId: WorkspaceDataId, name: string): Promise<Buffer[]> {
|
||||
const data = await this.doGet(ctx, dataId, name)
|
||||
async read (ctx: MeasureContext, wsIds: WorkspaceIds, name: string): Promise<Buffer[]> {
|
||||
const data = await this.doGet(ctx, wsIds, name)
|
||||
const chunks: Buffer[] = []
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
@ -422,19 +445,19 @@ export class S3Service implements StorageAdapter {
|
||||
@withContext('partial')
|
||||
async partial (
|
||||
ctx: MeasureContext,
|
||||
dataId: WorkspaceDataId,
|
||||
wsIds: WorkspaceIds,
|
||||
objectName: string,
|
||||
offset: number,
|
||||
length?: number
|
||||
): Promise<Readable> {
|
||||
const range = length !== undefined ? `bytes=${offset}-${offset + length}` : `bytes=${offset}-`
|
||||
return await this.doGet(ctx, dataId, objectName, range)
|
||||
return await this.doGet(ctx, wsIds, objectName, range)
|
||||
}
|
||||
|
||||
@withContext('getUrl')
|
||||
async getUrl (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<string> {
|
||||
async getUrl (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<string> {
|
||||
const filesUrl = getMetadata(serverCore.metadata.FilesUrl) ?? ''
|
||||
return filesUrl.replaceAll(':workspace', dataId).replaceAll(':blobId', objectName)
|
||||
return filesUrl.replaceAll(':workspace', getDataId(wsIds)).replaceAll(':blobId', objectName)
|
||||
}
|
||||
}
|
||||
|
||||
|
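The core of this change is visible above: every `S3Service` method that used to take a bare `WorkspaceDataId` now takes a `WorkspaceIds` record, and bucket naming goes through `getDataId`. A minimal sketch of the shape involved and of the fallback `getDataId` is assumed to implement (the `wsIds.dataId ?? uuid` pattern recurs throughout this commit; the local type aliases below are illustrative, not the real exports from `@hcengineering/core`):

```typescript
// Minimal sketch of the WorkspaceIds record used throughout this commit.
// The real types live in @hcengineering/core; these local aliases are
// illustrative stand-ins only.
type WorkspaceUuid = string & { __uuid: true }
type WorkspaceDataId = string & { __dataId: true }

interface WorkspaceIds {
  uuid: WorkspaceUuid
  dataId?: WorkspaceDataId // legacy bucket name, present only for older workspaces
  url: string
}

// Assumed behavior of getDataId from @hcengineering/server-core: prefer the
// legacy dataId and fall back to the uuid, mirroring the
// `wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)` pattern above.
function getDataId (wsIds: WorkspaceIds): WorkspaceDataId {
  return wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)
}

// Example: a workspace carrying a legacy dataId keeps its old bucket name.
const wsIds: WorkspaceIds = {
  uuid: 'a1b2c3' as unknown as WorkspaceUuid,
  dataId: 'my-workspace' as unknown as WorkspaceDataId,
  url: ''
}
console.log(getDataId(wsIds)) // 'my-workspace'
```

Keeping `dataId` optional is what lets old workspaces keep their legacy bucket names while new ones are addressed purely by uuid.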
@@ -1,4 +1,4 @@
-import { MeasureMetricsContext, type WorkspaceDataId, generateId } from '@hcengineering/core'
+import { MeasureMetricsContext, type WorkspaceDataId, type WorkspaceUuid, generateId } from '@hcengineering/core'
 import type { StorageConfiguration } from '@hcengineering/server-core'
 import { S3Service, processConfigFromEnv, type S3Config } from '.'

@@ -21,10 +21,14 @@ async function doTest (): Promise<void> {
     await b.delete()
   }

-  const genWorkspaceId1 = generateId()
+  const genWorkspaceId1 = generateId() as unknown as WorkspaceDataId

-  const ws1 = genWorkspaceId1 as unknown as WorkspaceDataId
-  await storageService.make(toolCtx, ws1)
+  const wsIds1 = {
+    uuid: genWorkspaceId1 as unknown as WorkspaceUuid,
+    dataId: genWorkspaceId1,
+    url: ''
+  }
+  await storageService.make(toolCtx, wsIds1)
   /// /////// Uploads
   let st1 = Date.now()
   const sz = 10
@@ -32,7 +36,7 @@ async function doTest (): Promise<void> {
   for (let i = 0; i < 10; i++) {
     // We need 1Mb random file to check upload speed.
     const st = Date.now()
-    await storageService.put(toolCtx, ws1, `testObject.${i}`, stream, 'application/octet-stream', stream.length)
+    await storageService.put(toolCtx, wsIds1, `testObject.${i}`, stream, 'application/octet-stream', stream.length)
     console.log('upload time', Date.now() - st)
   }
   let now = Date.now()
@@ -43,7 +47,7 @@ async function doTest (): Promise<void> {
   for (let i = 0; i < 10; i++) {
     // We need 1Mb random file to check upload speed.
     const st = Date.now()
-    await storageService.read(toolCtx, ws1, `testObject.${i}`)
+    await storageService.read(toolCtx, wsIds1, `testObject.${i}`)
     console.log('download time', Date.now() - st)
   }

@@ -55,7 +59,7 @@ async function doTest (): Promise<void> {
   for (let i = 0; i < 10; i++) {
     // We need 1Mb random file to check upload speed.
     const st = Date.now()
-    const readable = await storageService.get(toolCtx, ws1, `testObject.${i}`)
+    const readable = await storageService.get(toolCtx, wsIds1, `testObject.${i}`)
     const chunks: Buffer[] = []
     readable.on('data', (chunk) => {
       chunks.push(chunk)
@@ -78,7 +82,7 @@ async function doTest (): Promise<void> {
     // We need 1Mb random file to check upload speed.
     const st = Date.now()
     for (let i = 0; i < sz; i++) {
-      const readable = await storageService.partial(toolCtx, ws1, `testObject.${i}`, i * MB, MB)
+      const readable = await storageService.partial(toolCtx, wsIds1, `testObject.${i}`, i * MB, MB)
       const chunks: Buffer[] = []
       readable.on('data', (chunk) => {
         chunks.push(chunk)
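The benchmark above wraps each storage call in bare `Date.now()` timing. A small helper sketch of that pattern, assuming nothing beyond the standard library:

```typescript
// Hedged sketch of the timing pattern used in the benchmark above:
// wall-clock time around each put/read call, nothing more sophisticated.
async function timed<T> (label: string, op: () => Promise<T>): Promise<T> {
  const st = Date.now()
  const result = await op()
  console.log(label, Date.now() - st)
  return result
}
```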
@@ -33,7 +33,6 @@ import {
   type TxResult,
   type Blob,
   type WorkspaceIds,
-  type WorkspaceDataId,
   generateId
 } from '@hcengineering/core'
 import { PlatformError, unknownError } from '@hcengineering/platform'
@@ -46,7 +45,7 @@ import {

 class StorageBlobAdapter implements DbAdapter {
   constructor (
-    readonly storageId: WorkspaceDataId,
+    readonly storageIds: WorkspaceIds,
     readonly client: StorageAdapterEx, // Should not be closed
     readonly ctx: MeasureContext
   ) {}
@@ -110,13 +109,13 @@ class StorageBlobAdapter implements DbAdapter {
   async close (): Promise<void> {}

   find (ctx: MeasureContext, domain: Domain): StorageIterator {
-    return this.client.find(ctx, this.storageId)
+    return this.client.find(ctx, this.storageIds)
   }

   async load (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
     const blobs: Blob[] = []
     for (const d of docs) {
-      const bb = await this.client.stat(ctx, this.storageId, d)
+      const bb = await this.client.stat(ctx, this.storageIds, d)
       if (bb !== undefined) {
         blobs.push(bb)
       }
@@ -134,7 +133,7 @@ class StorageBlobAdapter implements DbAdapter {
   }

   async clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> {
-    await this.client.remove(this.ctx, this.storageId, docs)
+    await this.client.remove(this.ctx, this.storageIds, docs)
   }

   async update (ctx: MeasureContext, domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>): Promise<void> {}
@@ -148,17 +147,16 @@ export async function createStorageDataAdapter (
   contextVars: Record<string, any>,
   hierarchy: Hierarchy,
   url: string,
-  workspaceId: WorkspaceIds,
+  wsIds: WorkspaceIds,
   modelDb: ModelDb,
   storage?: StorageAdapter
 ): Promise<DbAdapter> {
   if (storage === undefined) {
     throw new Error('Storage adapter required')
   }
-  const storageId = workspaceId.dataId ?? (workspaceId.uuid as unknown as WorkspaceDataId)
   // We need to create bucket if it doesn't exist
-  if (!(await storage.exists(ctx, storageId))) {
-    await storage.make(ctx, storageId)
+  if (!(await storage.exists(ctx, wsIds))) {
+    await storage.make(ctx, wsIds)
   }
-  return new StorageBlobAdapter(storageId, storage as StorageAdapterEx, ctx)
+  return new StorageBlobAdapter(wsIds, storage as StorageAdapterEx, ctx)
 }
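Note the simplification in `createStorageDataAdapter`: the bucket-existence probe now passes the whole `WorkspaceIds` record instead of deriving a data id first. A minimal sketch of that ensure-bucket step, with `MiniStorage` as a hypothetical stand-in for the real `StorageAdapter`:

```typescript
// Sketch of the ensure-bucket step above: probe with exists() and create
// with make() before handing out the adapter. MiniStorage is a hypothetical
// stand-in, not the real StorageAdapter interface.
interface MiniStorage {
  exists: (wsIds: { uuid: string }) => Promise<boolean>
  make: (wsIds: { uuid: string }) => Promise<void>
}

async function ensureBucket (storage: MiniStorage, wsIds: { uuid: string }): Promise<void> {
  if (!(await storage.exists(wsIds))) {
    await storage.make(wsIds)
  }
}
```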
@@ -1,9 +1,9 @@
 import {
   withContext,
+  type WorkspaceIds,
   type Blob,
   type MeasureContext,
-  type StorageIterator,
-  type WorkspaceDataId
+  type StorageIterator
 } from '@hcengineering/core'
 import { type Readable } from 'stream'

@@ -19,7 +19,7 @@ import {
 } from '@hcengineering/storage'

 import { Analytics } from '@hcengineering/analytics'
-import serverCore, { type StorageConfig, type StorageConfiguration } from '@hcengineering/server-core'
+import serverCore, { getDataId, type StorageConfig, type StorageConfiguration } from '@hcengineering/server-core'

 class NoSuchKeyError extends Error {
   code: string
@@ -36,7 +36,7 @@ export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx
   // Adapters should be in reverse order, first one is target one, and next ones are for fallback
   constructor (readonly adapters: NamedStorageAdapter[]) {}

-  async initialize (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {}
+  async initialize (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {}

   doTrimHash (s: string | undefined): string {
     if (s == null) {
@@ -48,8 +48,8 @@ export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx
     return s
   }

-  find (ctx: MeasureContext, dataId: WorkspaceDataId): StorageIterator {
-    const storageIterator = this.makeStorageIterator(ctx, dataId)
+  find (ctx: MeasureContext, wsIds: WorkspaceIds): StorageIterator {
+    const storageIterator = this.makeStorageIterator(ctx, wsIds)

     return {
       next: async () => {
@@ -67,7 +67,7 @@ export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx
     }
   }

-  private makeStorageIterator (ctx: MeasureContext, dataId: WorkspaceDataId): BlobStorageIterator {
+  private makeStorageIterator (ctx: MeasureContext, wsIds: WorkspaceIds): BlobStorageIterator {
     // We need to reverse, since we need to iterate on latest document last
     const adapters = [...this.adapters].reverse()
     let provider: NamedStorageAdapter | undefined
@@ -77,7 +77,7 @@ export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx
       while (true) {
         if (iterator === undefined && adapters.length > 0) {
           provider = adapters.shift() as NamedStorageAdapter
-          iterator = await provider.adapter.listStream(ctx, dataId)
+          iterator = await provider.adapter.listStream(ctx, wsIds)
         }
         if (iterator === undefined) {
           return []
@@ -111,9 +111,9 @@ export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx
     }
   }

-  async exists (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<boolean> {
+  async exists (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<boolean> {
     for (const { adapter } of this.adapters) {
-      if (!(await adapter.exists(ctx, dataId))) {
+      if (!(await adapter.exists(ctx, wsIds))) {
         return false
       }
     }
@@ -121,14 +121,14 @@ export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx
   }

   @withContext('aggregator-make', {})
-  async make (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {
+  async make (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {
     for (const { name, adapter } of this.adapters) {
       try {
-        if (!(await adapter.exists(ctx, dataId))) {
-          await adapter.make(ctx, dataId)
+        if (!(await adapter.exists(ctx, wsIds))) {
+          await adapter.make(ctx, wsIds)
         }
       } catch (err: any) {
-        ctx.error('failed to init adapter', { adapter: name, dataId, error: err })
+        ctx.error('failed to init adapter', { adapter: name, wsIds, error: err })
         // Do not throw error in case default adapter is ok
         Analytics.handleError(err)
       }
@@ -145,24 +145,24 @@ export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx
   }

   @withContext('fallback-delete', {})
-  async delete (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {
+  async delete (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {
     for (const { adapter } of this.adapters) {
-      if (await adapter.exists(ctx, dataId)) {
-        await adapter.delete(ctx, dataId)
+      if (await adapter.exists(ctx, wsIds)) {
+        await adapter.delete(ctx, wsIds)
       }
     }
   }

   @withContext('fallback-remove', {})
-  async remove (ctx: MeasureContext, dataId: WorkspaceDataId, objectNames: string[]): Promise<void> {
+  async remove (ctx: MeasureContext, wsIds: WorkspaceIds, objectNames: string[]): Promise<void> {
     // Group by provider and delegate into it.
     for (const { adapter } of this.adapters) {
-      await adapter.remove(ctx, dataId, objectNames)
+      await adapter.remove(ctx, wsIds, objectNames)
     }
   }

-  async listStream (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<BlobStorageIterator> {
-    const storageIterator = this.makeStorageIterator(ctx, dataId)
+  async listStream (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<BlobStorageIterator> {
+    const storageIterator = this.makeStorageIterator(ctx, wsIds)
     return {
       next: async (): Promise<ListBlobResult[]> => {
         return await storageIterator.next()
@@ -174,9 +174,9 @@ export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx
   }

   @withContext('fallback-stat', {})
-  async stat (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Blob | undefined> {
+  async stat (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Blob | undefined> {
     for (const { name, adapter } of this.adapters) {
-      const stat = await adapter.stat(ctx, dataId, objectName)
+      const stat = await adapter.stat(ctx, wsIds, objectName)
       if (stat !== undefined) {
         stat.provider = name
         return stat
@@ -185,51 +185,51 @@ export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx
   }

   @withContext('fallback-get', {})
-  async get (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Readable> {
+  async get (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Readable> {
     for (const { adapter } of this.adapters) {
       try {
-        return await adapter.get(ctx, dataId, objectName)
+        return await adapter.get(ctx, wsIds, objectName)
       } catch (err: any) {
         // ignore
       }
     }
-    throw new NoSuchKeyError(`${dataId} missing ${objectName}`)
+    throw new NoSuchKeyError(`uuid=${wsIds.uuid} dataId=${wsIds.dataId} missing ${objectName}`)
   }

   @withContext('fallback-partial', {})
   async partial (
     ctx: MeasureContext,
-    dataId: WorkspaceDataId,
+    wsIds: WorkspaceIds,
     objectName: string,
     offset: number,
     length?: number | undefined
   ): Promise<Readable> {
     for (const { adapter } of this.adapters) {
       try {
-        return await adapter.partial(ctx, dataId, objectName, offset, length)
+        return await adapter.partial(ctx, wsIds, objectName, offset, length)
       } catch (err: any) {
         // ignore
       }
     }
-    throw new NoSuchKeyError(`${dataId} missing ${objectName}`)
+    throw new NoSuchKeyError(`uuid=${wsIds.uuid} dataId=${wsIds.dataId} missing ${objectName}`)
   }

   @withContext('fallback-read', {})
-  async read (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Buffer[]> {
+  async read (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Buffer[]> {
     for (const { adapter } of this.adapters) {
       try {
-        return await adapter.read(ctx, dataId, objectName)
+        return await adapter.read(ctx, wsIds, objectName)
       } catch (err: any) {
         // Ignore
       }
     }
-    throw new NoSuchKeyError(`${dataId} missing ${objectName}`)
+    throw new NoSuchKeyError(`uuid=${wsIds.uuid} dataId=${wsIds.dataId} missing ${objectName}`)
   }

   @withContext('aggregator-put', {})
   put (
     ctx: MeasureContext,
-    dataId: WorkspaceDataId,
+    wsIds: WorkspaceIds,
     objectName: string,
     stream: string | Readable | Buffer,
     contentType: string,
@@ -237,15 +237,15 @@ export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx
   ): Promise<UploadedObjectInfo> {
     const adapter = this.adapters[0].adapter
     // Remove in other storages, if applicable
-    return adapter.put(ctx, dataId, objectName, stream, contentType, size)
+    return adapter.put(ctx, wsIds, objectName, stream, contentType, size)
   }

   @withContext('aggregator-getUrl', {})
-  async getUrl (ctx: MeasureContext, dataId: WorkspaceDataId, name: string): Promise<string> {
-    // const { provider, stat } = await this.findProvider(ctx, dataId, name)
-    // return await provider.getUrl(ctx, dataId, stat.storageId)
+  async getUrl (ctx: MeasureContext, wsIds: WorkspaceIds, name: string): Promise<string> {
+    // const { provider, stat } = await this.findProvider(ctx, wsIds, name)
+    // return await provider.getUrl(ctx, wsIds, stat.storageId)
     const filesUrl = getMetadata(serverCore.metadata.FilesUrl) ?? ''
-    return filesUrl.replaceAll(':workspace', dataId).replaceAll(':blobId', name)
+    return filesUrl.replaceAll(':workspace', getDataId(wsIds)).replaceAll(':blobId', name)
   }
 }
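The fallback adapter's contract is easy to miss in the line-by-line changes: reads walk the adapter list in order and return the first hit, while writes always go to the first (target) adapter. A sketch of that behavior under simplified, hypothetical types:

```typescript
// Illustrative sketch of the fallback semantics in FallbackStorageAdapter:
// reads fall through the adapter list, writes only hit the target adapter.
// MiniAdapter is a hypothetical reduction, not the real interface.
interface MiniAdapter {
  name: string
  get: (workspace: string, objectName: string) => Promise<Buffer>
  put: (workspace: string, objectName: string, data: Buffer) => Promise<void>
}

class MiniFallback {
  constructor (readonly adapters: MiniAdapter[]) {}

  async get (workspace: string, objectName: string): Promise<Buffer> {
    for (const adapter of this.adapters) {
      try {
        return await adapter.get(workspace, objectName)
      } catch {
        // fall through to the next adapter, as the real get() does
      }
    }
    throw new Error(`${workspace} missing ${objectName}`)
  }

  async put (workspace: string, objectName: string, data: Buffer): Promise<void> {
    // Writes are not fanned out: only the first (target) adapter receives them.
    await this.adapters[0].put(workspace, objectName, data)
  }
}
```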
@@ -1,4 +1,10 @@
-import { MeasureMetricsContext, type WorkspaceDataId, type MeasureContext } from '@hcengineering/core'
+import {
+  MeasureMetricsContext,
+  type WorkspaceDataId,
+  type MeasureContext,
+  type WorkspaceUuid,
+  type WorkspaceIds
+} from '@hcengineering/core'
 import type { NamedStorageAdapter } from '@hcengineering/storage'
 import { FallbackStorageAdapter } from '../fallback'
 import { MemStorageAdapter } from './memAdapters'
@@ -9,7 +15,7 @@ describe('aggregator tests', () => {
   mem2: MemStorageAdapter
   aggr: FallbackStorageAdapter
   testCtx: MeasureContext
-  ws1: WorkspaceDataId
+  wsIds1: WorkspaceIds
 } {
   const mem1 = new MemStorageAdapter()

@@ -20,24 +26,28 @@ describe('aggregator tests', () => {
   const aggr = new FallbackStorageAdapter(adapters)

   const testCtx = new MeasureMetricsContext('test', {})
-  const ws1 = 'ws1' as WorkspaceDataId
-  return { mem1, mem2, aggr, ws1, testCtx }
+  const wsIds1 = {
+    uuid: 'ws1-uuid' as WorkspaceUuid,
+    dataId: 'ws1-dataId' as WorkspaceDataId,
+    url: 'ws1-url'
+  }
+  return { mem1, mem2, aggr, wsIds1, testCtx }
 }

 it('not reuse existing storage', async () => {
-  const { mem1, aggr, ws1, testCtx } = prepare1()
+  const { mem1, aggr, wsIds1, testCtx } = prepare1()

   // Test default provider
-  await mem1.put(testCtx, ws1, 'test', 'data', 'text/plain')
+  await mem1.put(testCtx, wsIds1, 'test', 'data', 'text/plain')

-  const stat = await aggr.stat(testCtx, ws1, 'test')
+  const stat = await aggr.stat(testCtx, wsIds1, 'test')
   expect(stat?.provider).toEqual('mem1')

-  await aggr.put(testCtx, ws1, 'test', 'data2', 'text/plain')
-  const stat2 = await aggr.stat(testCtx, ws1, 'test')
+  await aggr.put(testCtx, wsIds1, 'test', 'data2', 'text/plain')
+  const stat2 = await aggr.stat(testCtx, wsIds1, 'test')
   expect(stat2?.provider).toEqual('mem2')

-  const dta = Buffer.concat((await aggr.read(testCtx, ws1, 'test')) as any).toString()
+  const dta = Buffer.concat((await aggr.read(testCtx, wsIds1, 'test')) as any).toString()
   expect(dta).toEqual('data2')
 })
})
@@ -1,41 +1,57 @@
-import core, { type Blob, type MeasureContext, type WorkspaceDataId } from '@hcengineering/core'
+import core, {
+  WorkspaceIds,
+  WorkspaceUuid,
+  type Blob,
+  type MeasureContext,
+  type WorkspaceDataId
+} from '@hcengineering/core'
+import { getDataId } from '@hcengineering/server-core'
 import type { BlobStorageIterator, BucketInfo, StorageAdapter, UploadedObjectInfo } from '@hcengineering/storage'
 import { Readable } from 'stream'

 export class MemStorageAdapter implements StorageAdapter {
   files = new Map<string, Blob & { content: Buffer, workspace: WorkspaceDataId }>()

-  async initialize (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {}
+  async initialize (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {}

   async close (): Promise<void> {}

-  async exists (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<boolean> {
+  async exists (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<boolean> {
     return true
   }

-  async make (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {}
+  async make (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {}

-  async delete (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<void> {}
+  async delete (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<void> {}

   async listBuckets (ctx: MeasureContext): Promise<BucketInfo[]> {
     const workspaces = new Set(Array.from(this.files.values()).map((it) => it.workspace))
     return Array.from(workspaces).map((it) => ({
       name: it,
       delete: async () => {
-        await this.delete(ctx, it)
+        await this.delete(ctx, {
+          uuid: it as unknown as WorkspaceUuid,
+          dataId: it,
+          url: ''
+        })
       },
-      list: () => this.listStream(ctx, it)
+      list: () =>
+        this.listStream(ctx, {
+          uuid: it as unknown as WorkspaceUuid,
+          dataId: it,
+          url: ''
+        })
     }))
   }

-  async remove (ctx: MeasureContext, dataId: WorkspaceDataId, objectNames: string[]): Promise<void> {
+  async remove (ctx: MeasureContext, wsIds: WorkspaceIds, objectNames: string[]): Promise<void> {
     for (const k of objectNames) {
-      this.files.delete(dataId + '/' + k)
+      this.files.delete(getDataId(wsIds) + '/' + k)
     }
   }

-  async listStream (ctx: MeasureContext, dataId: WorkspaceDataId): Promise<BlobStorageIterator> {
-    const files = Array.from(this.files.values()).filter((it) => it.workspace === dataId)
+  async listStream (ctx: MeasureContext, wsIds: WorkspaceIds): Promise<BlobStorageIterator> {
+    const files = Array.from(this.files.values()).filter((it) => it.workspace === getDataId(wsIds))
     return {
       next: async () => {
         return files.splice(0, 100)
@@ -44,14 +60,14 @@ export class MemStorageAdapter implements StorageAdapter {
     }
   }

-  async stat (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Blob | undefined> {
-    return this.files.get(dataId + '/' + objectName)
+  async stat (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Blob | undefined> {
+    return this.files.get(getDataId(wsIds) + '/' + objectName)
   }

-  async get (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Readable> {
+  async get (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Readable> {
     const readable = new Readable()
     readable._read = () => {}
-    const content = this.files.get(dataId + '/' + objectName)?.content
+    const content = this.files.get(getDataId(wsIds) + '/' + objectName)?.content
     readable.push(content)
     readable.push(null)
     return readable
@@ -59,7 +75,7 @@ export class MemStorageAdapter implements StorageAdapter {

   async put (
     ctx: MeasureContext,
-    dataId: WorkspaceDataId,
+    wsIds: WorkspaceIds,
     objectName: string,
     stream: string | Readable | Buffer,
     contentType: string,
@@ -85,6 +101,7 @@ export class MemStorageAdapter implements StorageAdapter {
       })
     }
     const data = Buffer.concat(buffer as any)
+    const dataId = getDataId(wsIds)
     const dta = {
       _class: core.class.Blob,
       _id: objectName as any,
@@ -106,8 +123,8 @@ export class MemStorageAdapter implements StorageAdapter {
     }
   }

-  async read (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<Buffer[]> {
-    const content = this.files.get(dataId + '/' + objectName)?.content
+  async read (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<Buffer[]> {
+    const content = this.files.get(getDataId(wsIds) + '/' + objectName)?.content
     if (content === undefined) {
       throw new Error('NoSuchKey')
     }
@@ -116,7 +133,7 @@ export class MemStorageAdapter implements StorageAdapter {

   partial (
     ctx: MeasureContext,
-    dataId: WorkspaceDataId,
+    wsIds: WorkspaceIds,
     objectName: string,
     offset: number,
     length?: number | undefined
@@ -125,7 +142,7 @@ export class MemStorageAdapter implements StorageAdapter {
     throw new Error('NoSuchKey')
   }

-  async getUrl (ctx: MeasureContext, dataId: WorkspaceDataId, objectName: string): Promise<string> {
+  async getUrl (ctx: MeasureContext, wsIds: WorkspaceIds, objectName: string): Promise<string> {
     return '/files/' + objectName
   }
 }
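The in-memory test adapter keys blobs by `getDataId(wsIds) + '/' + objectName`, so any two `WorkspaceIds` records that resolve to the same data id address the same blob. A sketch, with `getDataId` re-stated locally under the assumed dataId-or-uuid fallback:

```typescript
// Sketch of the MemStorageAdapter keying scheme after the refactor.
// getDataId here is the assumed dataId-or-uuid fallback, not the real export.
const files = new Map<string, Buffer>()

const getDataId = (ws: { uuid: string, dataId?: string }): string => ws.dataId ?? ws.uuid

function putBlob (ws: { uuid: string, dataId?: string }, name: string, data: Buffer): void {
  files.set(getDataId(ws) + '/' + name, data)
}

// A workspace carrying a legacy dataId is addressed by that dataId, which is
// exactly what keeps old buckets readable after the API change.
putBlob({ uuid: 'ws-uuid', dataId: 'legacy-id' }, 'avatar.png', Buffer.from('x'))
console.log(files.has('legacy-id/avatar.png')) // true
```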
@@ -1,5 +1,5 @@
 import { Analytics } from '@hcengineering/analytics'
-import type { MeasureContext, WorkspaceDataId } from '@hcengineering/core'
+import type { MeasureContext, WorkspaceIds } from '@hcengineering/core'
 import type { StorageAdapter } from '@hcengineering/server-core'
 import type { Readable } from 'stream'

@@ -17,11 +17,11 @@ export interface BlobResponse {
 export async function getFile (
   ctx: MeasureContext,
   client: StorageAdapter,
-  wsDataId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   file: string,
   res: BlobResponse
 ): Promise<void> {
-  const stat = await ctx.with('stat', {}, () => client.stat(ctx, wsDataId, file))
+  const stat = await ctx.with('stat', {}, () => client.stat(ctx, wsIds, file))
   if (stat === undefined) {
     ctx.error('No such key', { file })
     res.cork(() => {
@@ -36,7 +36,7 @@ export async function getFile (
     { contentType: stat.contentType },
     async (ctx) => {
       try {
-        const dataStream = await ctx.with('readable', {}, () => client.get(ctx, wsDataId, file))
+        const dataStream = await ctx.with('readable', {}, () => client.get(ctx, wsIds, file))
         await new Promise<void>((resolve, reject) => {
           res.cork(() => {
             res.writeHead(200, {
@@ -62,7 +62,7 @@ export async function getFile (
           })
         })
       } catch (err: any) {
-        ctx.error('get-file-error', { workspace: wsDataId, err })
+        ctx.error('get-file-error', { workspace: wsIds, err })
         Analytics.handleError(err)
         res.cork(() => {
           res.status(500)
@@ -95,11 +95,11 @@ export async function getFileRange (
   ctx: MeasureContext,
   range: string,
   client: StorageAdapter,
-  wsDataId: WorkspaceDataId,
+  wsIds: WorkspaceIds,
   uuid: string,
   res: BlobResponse
 ): Promise<void> {
-  const stat = await ctx.with('stats', {}, () => client.stat(ctx, wsDataId, uuid))
+  const stat = await ctx.with('stats', {}, () => client.stat(ctx, wsIds, uuid))
   if (stat === undefined) {
     ctx.error('No such key', { file: uuid })
     res.cork(() => {
@@ -133,7 +133,7 @@ export async function getFileRange (
       const dataStream = await ctx.with(
         'partial',
         {},
-        () => client.partial(ctx, wsDataId, uuid, start, end - start + 1),
+        () => client.partial(ctx, wsIds, uuid, start, end - start + 1),
         {}
       )
       await new Promise<void>((resolve, reject) => {
@@ -173,7 +173,7 @@ export async function getFileRange (
         err?.message === 'No such key' ||
         err?.Code === 'NoSuchKey'
       ) {
-        ctx.info('No such key', { workspace: wsDataId, uuid })
+        ctx.info('No such key', { workspace: wsIds, uuid })
         res.cork(() => {
           res.status(404)
           res.end()
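`getFileRange` above asks the adapter for `end - start + 1` bytes starting at `start`. A hedged sketch of how an HTTP `Range: bytes=...` header maps onto that call; `parseRange` is a hypothetical helper, not part of the commit:

```typescript
// Sketch of the Range-header arithmetic behind getFileRange: parse
// `bytes=start-end`, default the end to the last byte of the blob, and
// request `end - start + 1` bytes from partial().
function parseRange (range: string, size: number): { start: number, length: number } {
  const [startStr, endStr] = range.replace(/^bytes=/, '').split('-')
  const start = parseInt(startStr, 10)
  const end = endStr != null && endStr !== '' ? parseInt(endStr, 10) : size - 1
  return { start, length: end - start + 1 }
}

console.log(parseRange('bytes=0-499', 1000)) // { start: 0, length: 500 }
console.log(parseRange('bytes=500-', 1000)) // { start: 500, length: 500 }
```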
@@ -34,7 +34,6 @@ import core, {
   type Client,
   type Ref,
   type WithLookup,
-  type WorkspaceDataId,
   type PersonInfo
 } from '@hcengineering/core'
 import { consoleModelLogger, MigrateOperation, ModelLogger, tryMigrate } from '@hcengineering/model'
@@ -131,8 +130,8 @@ export async function initModel (
     await progress(60)

     logger.log('create storage bucket', { workspaceId })

-    await storageAdapter.make(ctx, workspaceId as unknown as WorkspaceDataId)
+    const wsIds = { uuid: workspaceId, url: '' } // We don't need dataId for new workspaces
+    await storageAdapter.make(ctx, wsIds)
     await progress(100)
   } catch (err: any) {
     ctx.error('Failed to create workspace', { error: err })
@@ -15,7 +15,6 @@ import core, {
   SocialIdType,
   Space,
   TxOperations,
-  type WorkspaceDataId,
   type WorkspaceIds
 } from '@hcengineering/core'
 import { ModelLogger } from '@hcengineering/model'
@@ -174,9 +173,8 @@ export class WorkspaceInitializer {
       const id = uuid()
       const resp = await fetch(step.fromUrl)
       const buffer = Buffer.from(await resp.arrayBuffer())
-      const dataId = this.wsIds.dataId ?? (this.wsIds.uuid as unknown as WorkspaceDataId)
-
-      await this.storageAdapter.put(this.ctx, dataId, id, buffer, step.contentType, buffer.length)
+      await this.storageAdapter.put(this.ctx, this.wsIds, id, buffer, step.contentType, buffer.length)
       if (step.resultVariable !== undefined) {
         vars[`\${${step.resultVariable}}`] = id
         vars[`\${${step.resultVariable}_size}`] = buffer.length
@@ -324,9 +322,8 @@ export class WorkspaceInitializer {

     const json = parseMessageMarkdown(data ?? '', this.imageUrl)
     const markup = jsonToMarkup(json)
-    const dataId = this.wsIds.dataId ?? (this.wsIds.uuid as unknown as WorkspaceDataId)
-
-    return await saveCollabJson(this.ctx, this.storageAdapter, dataId, doc, markup)
+    return await saveCollabJson(this.ctx, this.storageAdapter, this.wsIds, doc, markup)
   }

   private async fillProps<T extends Doc, P extends Partial<T> | Props<T>>(
@@ -20,8 +20,7 @@ import {
   type WorkspaceUuid,
   type MeasureContext,
   type Tx,
-  type WorkspaceIds,
-  type WorkspaceDataId
+  type WorkspaceIds
 } from '@hcengineering/core'
 import platform, { Severity, Status, UNAUTHORIZED, unknownStatus } from '@hcengineering/platform'
 import { RPCHandler, type Response } from '@hcengineering/rpc'
@@ -282,12 +281,11 @@ export function startHttpServer (
         res.end()
         return
       }
-      const dataId = wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)
       ctx
         .with(
           'storage upload',
-          { workspace: dataId },
-          (ctx) => externalStorage.put(ctx, dataId, name, req, contentType, size !== -1 ? size : undefined),
+          { workspace: wsIds.uuid },
+          (ctx) => externalStorage.put(ctx, wsIds, name, req, contentType, size !== -1 ? size : undefined),
           { file: name, contentType }
         )
         .then(() => {
@@ -327,13 +325,12 @@ export function startHttpServer (
       }

       const name = req.query.name as string
-      const dataId = wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)

       const range = req.headers.range
       if (range !== undefined) {
         ctx
           .with('file-range', { workspace: wsIds.uuid }, (ctx) =>
-            getFileRange(ctx, range, externalStorage, dataId, name, wrapRes(res))
+            getFileRange(ctx, range, externalStorage, wsIds, name, wrapRes(res))
           )
           .catch((err) => {
             Analytics.handleError(err)
@@ -342,7 +339,7 @@ export function startHttpServer (
             res.end()
           })
       } else {
-        void getFile(ctx, externalStorage, dataId, name, wrapRes(res)).catch((err) => {
+        void getFile(ctx, externalStorage, wsIds, name, wrapRes(res)).catch((err) => {
           Analytics.handleError(err)
           ctx.error('/api/v1/blob get error', { err })
           res.writeHead(404, {})
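On the HTTP side the upload handler now forwards the full `WorkspaceIds` record to `put()` and logs only the uuid. A reduced sketch of that path, with hypothetical stand-in types:

```typescript
// Sketch of the upload path after the change: the handler hands the whole
// WorkspaceIds record to put() instead of deriving a dataId first.
// MiniStorage is a hypothetical stand-in for the real StorageAdapter.
import { Readable } from 'stream'

interface MiniStorage {
  put: (
    wsIds: { uuid: string },
    name: string,
    body: Readable,
    contentType: string,
    size?: number
  ) => Promise<void>
}

async function handleUpload (
  storage: MiniStorage,
  wsIds: { uuid: string },
  name: string,
  body: Readable,
  contentType: string,
  size: number
): Promise<void> {
  // size === -1 means "unknown length" and is mapped to undefined, as above
  await storage.put(wsIds, name, body, contentType, size !== -1 ? size : undefined)
}
```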
@@ -22,12 +22,13 @@ import {
   TranslateRequest,
   TranslateResponse
 } from '@hcengineering/ai-bot'
-import { MeasureContext, PersonUuid, Ref, SocialId, type WorkspaceUuid } from '@hcengineering/core'
+import { MeasureContext, PersonUuid, Ref, SocialId, type WorkspaceIds, type WorkspaceUuid } from '@hcengineering/core'
 import { Room } from '@hcengineering/love'
 import { WorkspaceInfoRecord } from '@hcengineering/server-ai-bot'
-import { getTransactorEndpoint } from '@hcengineering/server-client'
+import { getAccountClient } from '@hcengineering/server-client'
 import { generateToken } from '@hcengineering/server-token'
 import { htmlToMarkup, markupToHTML } from '@hcengineering/text'
+import { isWorkspaceLoginInfo } from '@hcengineering/account-client'
 import { encodingForModel } from 'js-tiktoken'
 import OpenAI from 'openai'

@@ -112,16 +113,27 @@ export class AIControl {
     }

     const token = generateToken(this.personUuid, workspace, { service: 'aibot' })
-    const endpoint = await getTransactorEndpoint(token)
+    const wsLoginInfo = await getAccountClient(token).getLoginInfoByToken()
+
+    if (!isWorkspaceLoginInfo(wsLoginInfo)) {
+      this.ctx.error('Invalid workspace login info', { workspace, wsLoginInfo })
+      return
+    }
+
+    const wsIds: WorkspaceIds = {
+      uuid: wsLoginInfo.workspace,
+      url: wsLoginInfo.workspaceUrl,
+      dataId: wsLoginInfo.workspaceDataId
+    }

     this.ctx.info('Listen workspace: ', { workspace })

     return new WorkspaceClient(
       this.storageAdapter,
       this.storage,
-      endpoint,
+      wsLoginInfo.endpoint,
       token,
-      workspace,
+      wsIds,
       this.personUuid,
       this.socialIds,
       this.ctx.newChild(workspace, {}),
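Both the AI bot above and the recording service below derive `WorkspaceIds` from a workspace login info. A sketch of that mapping; the `WorkspaceLoginInfo` field names are inferred from this diff and should be treated as assumptions:

```typescript
// Sketch of the login-info-to-WorkspaceIds mapping used by the AI bot and
// the recording service. The field names mirror what this diff shows.
interface WorkspaceLoginInfoLike {
  workspace: string
  workspaceUrl: string
  workspaceDataId?: string
  endpoint: string
}

function toWorkspaceIds (info: WorkspaceLoginInfoLike): { uuid: string, url: string, dataId?: string } {
  return { uuid: info.workspace, url: info.workspaceUrl, dataId: info.workspaceDataId }
}
```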
@@ -15,7 +15,7 @@

 import { MongoClientReference, getMongoClient } from '@hcengineering/mongo'
 import { Collection, Db, MongoClient, ObjectId, UpdateFilter, WithId } from 'mongodb'
-import { Doc, Ref, SortingOrder } from '@hcengineering/core'
+import { Doc, Ref, SortingOrder, WorkspaceUuid } from '@hcengineering/core'
 import { WorkspaceInfoRecord } from '@hcengineering/server-ai-bot'

 import config from './config'
@@ -52,7 +52,7 @@ export class DbStorage {
     return (await this.historyCollection.insertOne(record)).insertedId
   }

-  async getHistoryRecords (workspace: string, objectId: Ref<Doc>): Promise<WithId<HistoryRecord>[]> {
+  async getHistoryRecords (workspace: WorkspaceUuid, objectId: Ref<Doc>): Promise<WithId<HistoryRecord>[]> {
     return await this.historyCollection
       .find({ workspace, objectId }, { sort: { timestamp: SortingOrder.Ascending } })
       .toArray()
@@ -1,4 +1,4 @@
-import { MarkupBlobRef, PersonId, Ref, WorkspaceDataId } from '@hcengineering/core'
+import { MarkupBlobRef, PersonId, Ref } from '@hcengineering/core'
 import document, { Document, getFirstRank, Teamspace } from '@hcengineering/document'
 import { makeRank } from '@hcengineering/rank'
 import { parseMessageMarkdown } from '@hcengineering/text'
@@ -37,13 +37,12 @@ async function pdfToMarkdown (
   name: string | undefined
 ): Promise<string | undefined> {
   if (config.DataLabApiKey !== '') {
-    const dataId = workspaceClient.workspace as any as WorkspaceDataId
     try {
-      const stat = await workspaceClient.storage.stat(workspaceClient.ctx, dataId, fileId)
+      const stat = await workspaceClient.storage.stat(workspaceClient.ctx, workspaceClient.wsIds, fileId)
       if (stat?.contentType !== 'application/pdf') {
         return
       }
-      const file = await workspaceClient.storage.get(workspaceClient.ctx, dataId, fileId)
+      const file = await workspaceClient.storage.get(workspaceClient.ctx, workspaceClient.wsIds, fileId)
       const buffer = await stream2buffer(file)

       const url = 'https://www.datalab.to/api/v1/marker'
@@ -96,8 +95,7 @@ async function saveFile (

   const client = await workspaceClient.opClient
   const fileId = uuid()
-  const dataId = workspaceClient.workspace as any as WorkspaceDataId
-  await workspaceClient.storage.put(workspaceClient.ctx, dataId, fileId, converted, 'application/json')
+  await workspaceClient.storage.put(workspaceClient.ctx, workspaceClient.wsIds, fileId, converted, 'application/json')

   const teamspaces = await client.findAll(document.class.Teamspace, {})
   const parent = await client.findOne(document.class.Document, { _id: args.parent as Ref<Document> })
@@ -46,8 +46,8 @@ import core, {
   Tx,
   TxCUD,
   TxOperations,
-  WorkspaceDataId,
-  type WorkspaceUuid
+  type WorkspaceUuid,
+  type WorkspaceIds
 } from '@hcengineering/core'
 import { Room } from '@hcengineering/love'
 import { WorkspaceInfoRecord } from '@hcengineering/server-ai-bot'
@@ -88,7 +88,7 @@ export class WorkspaceClient {
     readonly dbStorage: DbStorage,
     readonly transactorUrl: string,
     readonly token: string,
-    readonly workspace: WorkspaceUuid,
+    readonly wsIds: WorkspaceIds,
     readonly personUuid: PersonUuid,
     readonly socialIds: SocialId[],
     readonly ctx: MeasureContext,
@@ -120,19 +120,25 @@ export class WorkspaceClient {
     await this.checkEmployeeInfo(opClient)

     if (this.aiPerson !== undefined && config.LoveEndpoint !== '') {
-      this.love = new LoveController(this.workspace, this.ctx.newChild('love', {}), this.token, opClient, this.aiPerson)
+      this.love = new LoveController(
+        this.wsIds.uuid,
+        this.ctx.newChild('love', {}),
+        this.token,
+        opClient,
+        this.aiPerson
+      )
     }

     this.client.notify = (...txes: Tx[]) => {
       void this.txHandler(opClient, txes as TxCUD<Doc>[])
     }
-    this.ctx.info('Initialized workspace', { workspace: this.workspace })
+    this.ctx.info('Initialized workspace', { workspace: this.wsIds })

     return opClient
   }

   private async checkEmployeeInfo (client: TxOperations): Promise<void> {
-    this.ctx.info('Upload avatar file', { workspace: this.workspace })
+    this.ctx.info('Upload avatar file', { workspace: this.wsIds })

     try {
       const stat = fs.statSync(config.AvatarPath)
@@ -145,16 +151,9 @@ export class WorkspaceClient {
       if (!isAlreadyUploaded) {
         const data = fs.readFileSync(config.AvatarPath)

-        await this.storage.put(
-          this.ctx,
-          this.workspace as any as WorkspaceDataId,
-          config.AvatarName,
-          data,
-          config.AvatarContentType,
-          data.length
-        )
-        await this.updateAvatarInfo(this.workspace, config.AvatarPath, lastModified)
-        this.ctx.info('Avatar file uploaded successfully', { workspace: this.workspace, path: config.AvatarPath })
+        await this.storage.put(this.ctx, this.wsIds, config.AvatarName, data, config.AvatarContentType, data.length)
+        await this.updateAvatarInfo(this.wsIds.uuid, config.AvatarPath, lastModified)
+        this.ctx.info('Avatar file uploaded successfully', { workspace: this.wsIds, path: config.AvatarPath })
       }
     } catch (e) {
       this.ctx.error('Failed to upload avatar file', { e })
@@ -163,7 +162,7 @@ export class WorkspaceClient {
     await this.checkPersonData(client)
   }

-  private async updateAvatarInfo (workspace: string, path: string, lastModified: number): Promise<void> {
+  private async updateAvatarInfo (workspace: WorkspaceUuid, path: string, lastModified: number): Promise<void> {
     const record = await this.dbStorage.getWorkspace(workspace)

     if (record === undefined) {
@@ -194,10 +193,10 @@ export class WorkspaceClient {
       return
     }

-    const exist = await this.storage.stat(this.ctx, this.workspace as any, config.AvatarName)
+    const exist = await this.storage.stat(this.ctx, this.wsIds, config.AvatarName)

     if (exist === undefined) {
-      this.ctx.error('Cannot find file', { file: config.AvatarName, workspace: this.workspace })
+      this.ctx.error('Cannot find file', { file: config.AvatarName, workspace: this.wsIds })
       return
     }

@@ -227,7 +226,7 @@ export class WorkspaceClient {
       return this.historyMap.get(objectId) ?? []
     }

-    const historyRecords = await this.dbStorage.getHistoryRecords(this.workspace, objectId)
+    const historyRecords = await this.dbStorage.getHistoryRecords(this.wsIds.uuid, objectId)
     this.historyMap.set(objectId, historyRecords)
     return historyRecords
   }
@@ -260,12 +259,12 @@ export class WorkspaceClient {
       objectId,
       objectClass,
       tokens,
-      workspace: this.workspace
+      workspace: this.wsIds.uuid
     }

     await this.dbStorage.addHistoryRecord(summaryRecord)
     await this.dbStorage.removeHistoryRecords(toSummarize.map(({ _id }) => _id))
-    const newHistory = await this.dbStorage.getHistoryRecords(this.workspace, objectId)
+    const newHistory = await this.dbStorage.getHistoryRecords(this.wsIds.uuid, objectId)
     this.historyMap.set(objectId, newHistory)
     this.summarizing.delete(objectId)
   }
@@ -280,7 +279,7 @@ export class WorkspaceClient {
   ): Promise<void> {
     const currentHistory = (await this.getHistory(objectId)) ?? []
     const newRecord: HistoryRecord = {
-      workspace: this.workspace,
+      workspace: this.wsIds.uuid,
       message,
       objectId,
       objectClass,
@@ -392,7 +391,7 @@ export class WorkspaceClient {
       await this.opClient.close()
     }

-    this.ctx.info('Closed workspace client: ', { workspace: this.workspace })
+    this.ctx.info('Closed workspace client: ', { workspace: this.wsIds })
   }

   private async txHandler (_: TxOperations, txes: TxCUD<Doc>[]): Promise<void> {
@@ -641,9 +641,8 @@ export class GithubWorker implements IntegrationManager {

   async uploadFile (patch: string, file?: string, contentType?: string): Promise<Blob | undefined> {
     const id: string = file ?? generateId()
-    const dataId = this.workspace.dataId ?? (this.workspace.uuid as unknown as WorkspaceDataId)
-    await this.storageAdapter.put(this.ctx, dataId, id, patch, contentType ?? 'text/x-patch')
-    return await this.storageAdapter.stat(this.ctx, dataId, id)
+    await this.storageAdapter.put(this.ctx, this.workspace, id, patch, contentType ?? 'text/x-patch')
+    return await this.storageAdapter.stat(this.ctx, this.workspace, id)
   }

   integrationRepositories: WithLookup<GithubIntegrationRepository>[] = []
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 //
-import { MeasureContext, Ref, WorkspaceDataId, WorkspaceUuid } from '@hcengineering/core'
+import { MeasureContext, Ref, WorkspaceIds } from '@hcengineering/core'
 import { setMetadata } from '@hcengineering/platform'
 import serverClient from '@hcengineering/server-client'
 import { initStatisticsContext, StorageConfig, StorageConfiguration } from '@hcengineering/server-core'
@@ -76,8 +76,7 @@ export const main = async (): Promise<void> => {
     string,
     {
       name: string
-      workspace: WorkspaceUuid
-      workspaceDataId: WorkspaceDataId
+      wsIds: WorkspaceIds
       meetingMinutes?: Ref<MeetingMinutes>
     }
   >()
@@ -90,9 +89,9 @@ export const main = async (): Promise<void> => {
       for (const res of event.egressInfo.fileResults) {
         const data = dataByUUID.get(res.filename)
         if (data !== undefined && storageConfig !== undefined) {
-          const storedBlob = await saveFile(ctx, data.workspaceDataId, storageConfig, s3storageConfig, res.filename)
+          const storedBlob = await saveFile(ctx, data.wsIds, storageConfig, s3storageConfig, res.filename)
           if (storedBlob !== undefined) {
-            const client = await WorkspaceClient.create(data.workspace, ctx)
+            const client = await WorkspaceClient.create(data.wsIds.uuid, ctx)
             await client.saveFile(storedBlob._id, data.name, storedBlob, data.meetingMinutes)
             await client.close()
           }
@@ -145,19 +144,18 @@ export const main = async (): Promise<void> => {
     const meetingMinutes = req.body.meetingMinutes

     try {
-      const wsLoginInfo = await getAccountClient(token).getLoginInfoByToken()
-      const workspace = (wsLoginInfo as WorkspaceLoginInfo)?.workspace
-      if (workspace == null) {
+      const wsLoginInfo = (await getAccountClient(token).getLoginInfoByToken()) as WorkspaceLoginInfo
+      if (wsLoginInfo?.workspace == null) {
         console.error('No workspace found for the token')
         res.status(401).send()
         return
       }
-      const dataId = (wsLoginInfo as WorkspaceLoginInfo)?.workspaceDataId ?? (workspace as unknown as WorkspaceDataId)
       const dateStr = new Date().toISOString().replace('T', '_').slice(0, 19)
       const name = `${room}_${dateStr}.mp4`
-      const id = await startRecord(ctx, storageConfig, s3storageConfig, egressClient, roomClient, roomName, dataId)
-      dataByUUID.set(id, { name, workspace, workspaceDataId: dataId, meetingMinutes })
-      ctx.info('Start recording', { workspace, roomName, meetingMinutes })
+      const wsIds = { uuid: wsLoginInfo.workspace, dataId: wsLoginInfo.workspaceDataId, url: wsLoginInfo.workspaceUrl }
+      const id = await startRecord(ctx, storageConfig, s3storageConfig, egressClient, roomClient, roomName, wsIds)
+      dataByUUID.set(id, { name, wsIds, meetingMinutes })
+      ctx.info('Start recording', { workspace: wsLoginInfo.workspace, roomName, meetingMinutes })
       res.send()
     } catch (e) {
       console.error(e)
@@ -289,13 +287,13 @@ const startRecord = async (
   egressClient: EgressClient,
   roomClient: RoomServiceClient,
   roomName: string,
-  workspaceId: WorkspaceDataId
+  wsIds: WorkspaceIds
 ): Promise<string> => {
   if (storageConfig === undefined) {
     console.error('please provide storage configuration')
     throw new Error('please provide storage configuration')
   }
-  const uploadParams = await getS3UploadParams(ctx, workspaceId, storageConfig, s3StorageConfig)
+  const uploadParams = await getS3UploadParams(ctx, wsIds, storageConfig, s3StorageConfig)

   const { filepath, endpoint, accessKey, secret, region, bucket } = uploadParams
   const output = new EncodedFileOutput({
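The recording service keys its bookkeeping by egress id so the completion webhook can route the finished file back to the right workspace. A simplified sketch of that map, with stand-in types:

```typescript
// Sketch of the per-recording bookkeeping above: the egress id maps to the
// workspace ids plus the output file name. Types are simplified stand-ins.
interface RecordingInfo {
  name: string
  wsIds: { uuid: string, dataId?: string, url: string }
  meetingMinutes?: string
}

const dataByUUID = new Map<string, RecordingInfo>()

function onEgressStarted (egressId: string, info: RecordingInfo): void {
  dataByUUID.set(egressId, info)
}

// The webhook looks the recording up again when the file is finished.
function onFileFinished (egressId: string): RecordingInfo | undefined {
  return dataByUUID.get(egressId)
}
```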
@ -13,7 +13,14 @@
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
import { Blob, MeasureContext, systemAccountUuid, type WorkspaceDataId } from '@hcengineering/core'
|
||||
import {
|
||||
Blob,
|
||||
MeasureContext,
|
||||
systemAccountUuid,
|
||||
WorkspaceIds,
|
||||
WorkspaceUuid,
|
||||
type WorkspaceDataId
|
||||
} from '@hcengineering/core'
|
||||
import { DatalakeConfig, DatalakeService, createDatalakeClient } from '@hcengineering/datalake'
|
||||
import { S3Config, S3Service } from '@hcengineering/s3'
|
||||
import { StorageConfig } from '@hcengineering/server-core'
|
||||
@ -31,19 +38,20 @@ export interface S3UploadParams {

export async function getS3UploadParams (
ctx: MeasureContext,
workspaceDataId: WorkspaceDataId,
wsIds: WorkspaceIds,
storageConfig: StorageConfig,
s3StorageConfig: StorageConfig | undefined
): Promise<S3UploadParams> {
if (storageConfig.kind === 's3') {
return await getS3UploadParamsS3(ctx, workspaceDataId, storageConfig as S3Config)
const dataId = wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)
return await getS3UploadParamsS3(ctx, dataId, storageConfig as S3Config)
} else if (storageConfig.kind === 'datalake') {
if (s3StorageConfig === undefined || s3StorageConfig.kind !== 's3') {
throw new Error('Please provide S3 storage config')
}
return await getS3UploadParamsDatalake(
ctx,
workspaceDataId,
wsIds.uuid,
storageConfig as DatalakeConfig,
s3StorageConfig as S3Config
)
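For plain S3 the bucket is still addressed by the legacy data id, so the new code falls back to the UUID when no dataId exists. The same fallback reappears in saveFileToS3 below; as a standalone sketch (effectiveDataId is illustrative, not part of this commit):

import { type WorkspaceDataId, type WorkspaceIds } from '@hcengineering/core'

// Prefer the legacy data id for bucket addressing; otherwise reuse the UUID.
// The cast mirrors the one used throughout this commit.
function effectiveDataId (wsIds: WorkspaceIds): WorkspaceDataId {
  return wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)
}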
@ -54,24 +62,18 @@ export async function getS3UploadParams (

export async function saveFile (
ctx: MeasureContext,
workspaceId: WorkspaceDataId,
wsIds: WorkspaceIds,
storageConfig: StorageConfig,
s3StorageConfig: StorageConfig | undefined,
filename: string
): Promise<Blob | undefined> {
if (storageConfig.kind === 's3') {
return await saveFileToS3(ctx, workspaceId, storageConfig as S3Config, filename)
return await saveFileToS3(ctx, wsIds, storageConfig as S3Config, filename)
} else if (storageConfig.kind === 'datalake') {
if (s3StorageConfig === undefined || s3StorageConfig.kind !== 's3') {
throw new Error('Please provide S3 storage config')
}
return await saveFileToDatalake(
ctx,
workspaceId,
storageConfig as DatalakeConfig,
s3StorageConfig as S3Config,
filename
)
return await saveFileToDatalake(ctx, wsIds, storageConfig as DatalakeConfig, s3StorageConfig as S3Config, filename)
} else {
throw new Error('Unknown storage kind: ' + storageConfig.kind)
}
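saveFile now dispatches on storageConfig.kind with the full WorkspaceIds. A hedged usage sketch, assuming wsIds, storageConfig, s3StorageConfig and filename are in scope as in the recording flow above:

// Resolve the stored recording to a Blob, or undefined if it is missing.
const blob = await saveFile(ctx, wsIds, storageConfig, s3StorageConfig, filename)
if (blob === undefined) {
  ctx.error('recording file not found in storage', { filename })
}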
@ -102,7 +104,7 @@ async function getS3UploadParamsS3 (

async function getS3UploadParamsDatalake (
ctx: MeasureContext,
workspaceId: WorkspaceDataId,
workspaceId: WorkspaceUuid,
config: DatalakeConfig,
s3config: S3Config
): Promise<S3UploadParams> {
@ -129,19 +131,20 @@ async function getS3UploadParamsDatalake (

async function saveFileToS3 (
ctx: MeasureContext,
workspaceId: WorkspaceDataId,
wsIds: WorkspaceIds,
config: S3Config,
filename: string
): Promise<Blob | undefined> {
const storageAdapter = new S3Service(config)
const prefix = rootPrefix(config, workspaceId)
const dataId = wsIds.dataId ?? (wsIds.uuid as unknown as WorkspaceDataId)
const prefix = rootPrefix(config, dataId)
const uuid = stripPrefix(prefix, filename)
return await storageAdapter.stat(ctx, workspaceId, uuid)
return await storageAdapter.stat(ctx, wsIds, uuid)
}

async function saveFileToDatalake (
ctx: MeasureContext,
workspaceId: WorkspaceDataId,
wsIds: WorkspaceIds,
config: DatalakeConfig,
s3config: S3Config,
filename: string
@ -150,23 +153,23 @@ async function saveFileToDatalake (
const client = createDatalakeClient(config, token)
const storageAdapter = new DatalakeService(config)

const prefix = rootPrefix(s3config, workspaceId)
const prefix = rootPrefix(s3config, wsIds.uuid)
const uuid = stripPrefix(prefix, filename)

await client.uploadFromR2(ctx, workspaceId, uuid, { filename: uuid })
await client.uploadFromR2(ctx, wsIds.uuid, uuid, { filename: uuid })

return await storageAdapter.stat(ctx, workspaceId, uuid)
return await storageAdapter.stat(ctx, wsIds, uuid)
}

function getBucket (storageConfig: S3Config, workspaceId: WorkspaceDataId): string {
return storageConfig.rootBucket ?? (storageConfig.bucketPrefix ?? '') + workspaceId
}

function getBucketFolder (workspaceId: WorkspaceDataId): string {
function getBucketFolder (workspaceId: WorkspaceDataId | WorkspaceUuid): string {
return workspaceId
}

function getDocumentKey (storageConfig: any, workspace: WorkspaceDataId, name: string): string {
function getDocumentKey (storageConfig: any, workspace: WorkspaceDataId | WorkspaceUuid, name: string): string {
return storageConfig.rootBucket === undefined ? name : `${getBucketFolder(workspace)}/${name}`
}
@ -177,6 +180,6 @@ function stripPrefix (prefix: string | undefined, key: string): string {
return key
}

function rootPrefix (storageConfig: S3Config, workspaceId: WorkspaceDataId): string | undefined {
function rootPrefix (storageConfig: S3Config, workspaceId: WorkspaceDataId | WorkspaceUuid): string | undefined {
return storageConfig.rootBucket !== undefined ? getBucketFolder(workspaceId) + '/' : undefined
}
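Taken together, these helpers resolve storage locations differently depending on whether a shared rootBucket is configured. A worked example with illustrative values (workspace id 'ws1' is assumed, not from this commit):

// Shared root bucket: one bucket for all workspaces, folder per workspace.
//   getBucket({ rootBucket: 'huly' }, 'ws1')            -> 'huly'
//   getDocumentKey({ rootBucket: 'huly' }, 'ws1', 'f1') -> 'ws1/f1'
//   rootPrefix({ rootBucket: 'huly' }, 'ws1')           -> 'ws1/'
// No root bucket: one bucket per workspace, flat keys.
//   getBucket({ bucketPrefix: 'hc-' }, 'ws1')           -> 'hc-ws1'
//   getDocumentKey({}, 'ws1', 'f1')                     -> 'f1'
//   rootPrefix({}, 'ws1')                               -> undefined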
@ -14,7 +14,7 @@
// limitations under the License.
//

import { generateId, WorkspaceDataId } from '@hcengineering/core'
import { generateId, type WorkspaceIds } from '@hcengineering/core'
import { StorageConfiguration, initStatisticsContext } from '@hcengineering/server-core'
import { buildStorageFromConfig } from '@hcengineering/server-storage'
import { getClient as getAccountClientRaw, AccountClient, WorkspaceLoginInfo } from '@hcengineering/account-client'
@ -94,7 +94,7 @@ const extractToken = (headers: IncomingHttpHeaders, queryParams: any): string =>
}
}

type AsyncRequestHandler = (req: Request, res: Response, wsDataId: WorkspaceDataId, next: NextFunction) => Promise<void>
type AsyncRequestHandler = (req: Request, res: Response, wsIds: WorkspaceIds, next: NextFunction) => Promise<void>

const handleRequest = async (
fn: AsyncRequestHandler,
@ -104,14 +104,16 @@ const handleRequest = async (
): Promise<void> => {
try {
const token = extractToken(req.headers, req.query)
const loginInfo = await getAccountClient(token).getLoginInfoByToken()
const workspace = (loginInfo as WorkspaceLoginInfo)?.workspace
if (workspace === undefined) {
const wsLoginInfo = (await getAccountClient(token).getLoginInfoByToken()) as WorkspaceLoginInfo
if (wsLoginInfo?.workspace === undefined) {
throw new ApiError(401, "Couldn't find workspace with the provided token")
}
const wsDataId = (loginInfo as WorkspaceLoginInfo)?.workspaceDataId

await fn(req, res, wsDataId ?? (workspace as unknown as WorkspaceDataId), next)
const wsIds = {
uuid: wsLoginInfo.workspace,
dataId: wsLoginInfo.workspaceDataId,
url: wsLoginInfo.workspaceUrl
}
await fn(req, res, wsIds, next)
} catch (err: unknown) {
next(err)
}
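wrapRequest itself is not shown in this diff; a plausible sketch of how such a wrapper binds handleRequest into an Express handler (the parameter order of handleRequest is assumed from the fragment above):

import { type NextFunction, type Request, type Response } from 'express'

// Adapt an AsyncRequestHandler to Express; handleRequest's catch block
// already routes failures through next(err).
const wrapRequest = (fn: AsyncRequestHandler) =>
  (req: Request, res: Response, next: NextFunction): void => {
    void handleRequest(fn, req, res, next)
  }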
@ -132,7 +134,7 @@ export function createServer (storageConfig: StorageConfiguration): { app: Expre

app.get(
'/print',
wrapRequest(async (req, res, wsUuid) => {
wrapRequest(async (req, res, wsIds) => {
const rawlink = req.query.link as string
const link = decodeURIComponent(rawlink)
const kind = req.query.kind as PrintOptions['kind']
@ -166,7 +168,7 @@ export function createServer (storageConfig: StorageConfiguration): { app: Expre

const printId = `print-${generateId()}`

await storageAdapter.put(measureCtx, wsUuid, printId, printRes, `application/${kind}`, printRes.length)
await storageAdapter.put(measureCtx, wsIds, printId, printRes, `application/${kind}`, printRes.length)

res.contentType('application/json')
res.send({ id: printId })
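A hedged client-side sketch of calling this endpoint (printServiceUrl, the bearer-token header, and kind 'pdf' are assumptions; the server accepts the token from headers or query via extractToken):

// Request a print of a document link, then read back the stored blob id.
// The extra encodeURIComponent survives query parsing and is undone by
// the server's decodeURIComponent.
const params = new URLSearchParams({ link: encodeURIComponent(link), kind: 'pdf' })
const resp = await fetch(`${printServiceUrl}/print?${params.toString()}`, {
  headers: { Authorization: `Bearer ${token}` }
})
const { id } = await resp.json() // id under which printRes was stored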
@ -14,7 +14,7 @@
// limitations under the License.
//

import { generateId, WorkspaceDataId } from '@hcengineering/core'
import { generateId, type WorkspaceIds } from '@hcengineering/core'
import { initStatisticsContext, StorageConfiguration } from '@hcengineering/server-core'
import { buildStorageFromConfig } from '@hcengineering/server-storage'
import { getClient as getAccountClientRaw, AccountClient, WorkspaceLoginInfo } from '@hcengineering/account-client'
@ -35,7 +35,7 @@ function getAccountClient (token: string): AccountClient {
type AsyncRequestHandler = (
req: Request,
res: Response,
wsDataId: WorkspaceDataId,
wsIds: WorkspaceIds,
branding: Branding | null,
next: NextFunction
) => Promise<void>
@ -49,14 +49,17 @@ const handleRequest = async (
): Promise<void> => {
try {
const { rawToken } = extractToken(req.headers, req.query)
const loginInfo = await getAccountClient(rawToken).getLoginInfoByToken()
const workspace = (loginInfo as WorkspaceLoginInfo)?.workspace
if (workspace === undefined) {
const wsLoginInfo = (await getAccountClient(rawToken).getLoginInfoByToken()) as WorkspaceLoginInfo
if (wsLoginInfo?.workspace === undefined) {
throw new ApiError(401, "Couldn't find workspace with the provided token")
}
const wsDataId = (loginInfo as WorkspaceLoginInfo)?.workspaceDataId
const wsIds = {
uuid: wsLoginInfo.workspace,
dataId: wsLoginInfo.workspaceDataId,
url: wsLoginInfo.workspaceUrl
}
const branding = extractBranding(brandings, req.headers)
await fn(req, res, wsDataId ?? (workspace as unknown as WorkspaceDataId), branding, next)
await fn(req, res, wsIds, branding, next)
} catch (err: unknown) {
next(err)
}
@ -78,14 +81,14 @@ export function createServer (storageConfig: StorageConfiguration, brandings: Br

app.post(
'/sign',
wrapRequest(brandings, async (req, res, wsDataId, branding) => {
wrapRequest(brandings, async (req, res, wsIds, branding) => {
const fileId = req.body.fileId as string

if (fileId === undefined) {
throw new ApiError(400, 'Missing fileId')
}

const originalFile = await storageAdapter.read(measureCtx, wsDataId, fileId)
const originalFile = await storageAdapter.read(measureCtx, wsIds, fileId)
const ctx = {
title: branding?.title ?? 'Huly'
}
@ -97,7 +100,7 @@ export function createServer (storageConfig: StorageConfiguration, brandings: Br

const signedId = `signed-${fileId}-${generateId()}`

await storageAdapter.put(measureCtx, wsDataId, signedId, signRes, 'application/pdf', signRes.length)
await storageAdapter.put(measureCtx, wsIds, signedId, signRes, 'application/pdf', signRes.length)

res.contentType('application/json')
res.send({ id: signedId })
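Correspondingly for the sign service, a hedged sketch of a client call (signServiceUrl and the auth header are assumptions):

// Ask the service to sign an already-stored file; the response carries
// the id of the signed copy ('signed-<fileId>-<generated id>').
const resp = await fetch(`${signServiceUrl}/sign`, {
  method: 'POST',
  headers: { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' },
  body: JSON.stringify({ fileId })
})
const { id: signedId } = await resp.json()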
@ -638,7 +638,7 @@ export class WorkspaceWorker {
const attachments = await this.client.findAll(attachment.class.Attachment, { attachedTo: msg._id })
const res: Buffer[] = []
for (const attachment of attachments) {
const chunks = await this.storageAdapter.read(this.ctx, this.workspace, attachment.file) // TODO: FIXME
const chunks = await this.storageAdapter.read(this.ctx, this.workspace as any, attachment.file) // TODO: FIXME <--WorkspaceIds
const buffer = Buffer.concat(chunks as unknown as Uint8Array[])
if (buffer.length > 0) {
res.push(
@ -663,7 +663,7 @@ export class WorkspaceWorker {
try {
const id = uuid()
file.size = file.size ?? file.file.length
await this.storageAdapter.put(this.ctx, this.workspace, id, file.file, file.type, file.size)
await this.storageAdapter.put(this.ctx, this.workspace as any, id, file.file, file.type, file.size) // TODO: FIXME <--WorkspaceIds
const modifiedOn = event.msg.date * 1000
const tx = factory.createTxCollectionCUD<TelegramMessage, Attachment>(
msg._class,
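The telegram worker still tracks only a workspace UUID, hence the temporary 'as any' casts and TODO markers above. One possible shape for the eventual fix, assuming the worker keeps only the UUID (the empty url and absent dataId are placeholders, mirroring how this commit builds WorkspaceIds in the tools):

import { type WorkspaceIds, type WorkspaceUuid } from '@hcengineering/core'

// Wrap the bare UUID into the WorkspaceIds shape the adapters expect.
function toWsIds (workspace: WorkspaceUuid): WorkspaceIds {
  return { uuid: workspace, dataId: undefined, url: '' }
}
// ...then: await this.storageAdapter.put(this.ctx, toWsIds(this.workspace), id, file.file, file.type, file.size)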