Mirror of https://github.com/hcengineering/platform.git (synced 2025-04-09 09:41:03 +00:00)

commit 86191696d0
Merge remote-tracking branch 'origin/develop' into staging

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
.vscode/launch.json (vendored): 6 lines changed
@@ -118,9 +118,9 @@
"args": ["src/__start.ts"],
"env": {
"MONGO_URL": "mongodb://localhost:27017",
// "DB_URL": "mongodb://localhost:27017",
"REGION": "pg",
"DB_URL": "postgresql://postgres:example@localhost:5432",
"DB_URL": "mongodb://localhost:27017",
"REGION": "",
// "DB_URL": "postgresql://postgres:example@localhost:5432",
"SERVER_SECRET": "secret",
"TRANSACTOR_URL": "ws://localhost:3333",
"ACCOUNTS_URL": "http://localhost:3000",
@@ -115,10 +115,9 @@ services:
- MODEL_ENABLED=*
- ACCOUNTS_URL=http://host.docker.internal:3000
- BRANDING_PATH=/var/cfg/branding.json
- NOTIFY_INBOX_ONLY=true
# - PARALLEL=2
# - INIT_SCRIPT_URL=https://raw.githubusercontent.com/hcengineering/init/main/script.yaml
# - INIT_WORKSPACE=onboarding
- INIT_SCRIPT_URL=https://raw.githubusercontent.com/hcengineering/init/main/script.yaml
- INIT_WORKSPACE=test
restart: unless-stopped
workspacepg:
image: hardcoreeng/workspace
@@ -142,7 +141,7 @@ services:
- ACCOUNTS_URL=http://host.docker.internal:3000
- BRANDING_PATH=/var/cfg/branding.json
# - PARALLEL=2
# - INIT_SCRIPT_URL=https://raw.githubusercontent.com/hcengineering/init/main/script.yaml
- INIT_SCRIPT_URL=https://raw.githubusercontent.com/hcengineering/init/main/script.yaml
# - INIT_WORKSPACE=onboarding
restart: unless-stopped
collaborator:
@@ -72,8 +72,6 @@ async function processFixJsonMarkupFor (
db: Db,
storageAdapter: StorageAdapter
): Promise<void> {
console.log('processing', domain, _class)

const collection = db.collection<Doc>(domain)
const docs = await collection.find({ _class }).toArray()
for (const doc of docs) {
@@ -119,8 +117,6 @@ async function processFixJsonMarkupFor (
}
}
}

console.log('...processed', docs.length)
}

export async function migrateMarkup (
@@ -151,12 +147,9 @@ export async function migrateMarkup (
const collection = workspaceDb.collection(domain)

const filter = hierarchy.isMixin(_class) ? { [_class]: { $exists: true } } : { _class }

const count = await collection.countDocuments(filter)
const iterator = collection.find<Doc>(filter)

try {
console.log('processing', _class, '->', count)
await processMigrateMarkupFor(ctx, hierarchy, storageAdapter, workspaceId, attributes, iterator, concurrency)
} finally {
await iterator.close()
@@ -67,7 +67,6 @@ async function processMigrateMarkupFor (
client: MigrationClient,
iterator: MigrationIterator<DocUpdateMessage>
): Promise<void> {
let processed = 0
while (true) {
const docs = await iterator.next(1000)
if (docs === null || docs.length === 0) {
@@ -104,9 +103,6 @@ async function processMigrateMarkupFor (
if (ops.length > 0) {
await client.bulk(DOMAIN_ACTIVITY, ops)
}

processed += docs.length
console.log('...processed', processed)
}
}
@@ -53,7 +53,6 @@ async function processMigrateMarkupFor (
client: MigrationClient,
iterator: MigrationIterator<Doc>
): Promise<void> {
let processed = 0
while (true) {
const docs = await iterator.next(1000)
if (docs === null || docs.length === 0) {
@@ -88,9 +87,6 @@ async function processMigrateMarkupFor (
if (operations.length > 0) {
await client.bulk(domain, operations)
}

processed += docs.length
console.log('...processed', processed)
}
}

@@ -122,7 +118,6 @@ async function processFixMigrateMarkupFor (
client: MigrationClient,
iterator: MigrationIterator<Doc>
): Promise<void> {
let processed = 0
while (true) {
const docs = await iterator.next(1000)
if (docs === null || docs.length === 0) {
@@ -164,9 +159,6 @@ async function processFixMigrateMarkupFor (
if (operations.length > 0) {
await client.bulk(domain, operations)
}

processed += docs.length
console.log('...processed', processed)
}
}
@@ -14,7 +14,7 @@
-->
<script lang="ts">
import type { IntlString, Asset } from '@hcengineering/platform'
import { createEventDispatcher, ComponentType } from 'svelte'
import { createEventDispatcher, ComponentType, afterUpdate } from 'svelte'

import { DateRangeMode } from '@hcengineering/core'
import ui from '../../plugin'
@@ -86,6 +86,7 @@
focusManager?.setFocus(idx)
})
}
afterUpdate(() => dispatch('resize', input?.clientWidth))
</script>

<button
@@ -75,6 +75,7 @@
{width}
{shouldIgnoreOverdue}
on:change={handleDueDateChanged}
on:resize
/>
</div>
{/if}
@@ -49,18 +49,32 @@

let allWidth: number
const widths: number[] = []
const elements: HTMLDivElement[] = []

afterUpdate(() => {
let count: number = 0
widths.forEach((i) => (count += i))
full = count > allWidth
dispatch('change', { full, ckeckFilled })
if (elements.length > 0) {
if (items.length > 4) dispatch('resize', elements[0]?.clientWidth)
else {
allWidth = 0
for (let i = 0; i < items.length; i++) {
if (elements[i].clientWidth !== undefined && allWidth < elements[i].clientWidth) {
allWidth = elements[i].clientWidth
}
}
dispatch('resize', allWidth + (items.length - 1) * 3)
}
}
})
</script>

{#if kind === 'list' || kind === 'link'}
{#if items.length > 4}
<div
bind:this={elements[0]}
class="label-box no-shrink"
use:tooltip={{
component: TagsItemPresenter,
@@ -70,8 +84,8 @@
<TagsReferencePresenter {items} {kind} />
</div>
{:else}
{#each items as value}
<div class="label-box no-shrink" title={value.title}>
{#each items as value, i}
<div bind:this={elements[i]} class="label-box no-shrink" title={value.title}>
<TagReferencePresenter attr={undefined} {value} {kind} />
</div>
{/each}
@@ -18,7 +18,7 @@
import { RuleApplyResult, getClient, getDocRules } from '@hcengineering/presentation'
import { Component, Issue, IssueTemplate, Project, TrackerEvents } from '@hcengineering/tracker'
import { ButtonKind, ButtonShape, ButtonSize, deviceOptionsStore as deviceInfo } from '@hcengineering/ui'
import { createEventDispatcher } from 'svelte'
import { createEventDispatcher, afterUpdate } from 'svelte'
import { Analytics } from '@hcengineering/analytics'

import { activeComponent } from '../../issues'
@@ -47,6 +47,8 @@

const dispatch = createEventDispatcher()

let element: HTMLDivElement

const handleComponentIdChanged = async (newComponentId: Ref<Component> | null | undefined) => {
if (!isEditable || newComponentId === undefined || (!Array.isArray(value) && value.component === newComponentId)) {
return
@@ -101,11 +103,13 @@
}
}
}

afterUpdate(() => dispatch('resize', element?.clientWidth))
</script>

{#if kind === 'list'}
{#if !Array.isArray(value) && value.component}
<div class={compression ? 'label-wrapper' : 'clear-mins'}>
<div bind:this={element} class={compression ? 'label-wrapper' : 'clear-mins'}>
<ComponentSelector
{kind}
{size}
@@ -127,6 +131,7 @@
{/if}
{:else}
<div
bind:this={element}
class="flex flex-wrap clear-mins"
class:minus-margin={kind === 'list-header'}
class:label-wrapper={compression}
@@ -55,4 +55,5 @@
{size}
{kind}
shouldIgnoreOverdue={ignoreOverDue}
on:resize
/>
@@ -13,6 +13,7 @@
// limitations under the License.
-->
<script lang="ts">
import { createEventDispatcher, afterUpdate } from 'svelte'
import { WithLookup } from '@hcengineering/core'
import { getClient } from '@hcengineering/presentation'
import type { Issue } from '@hcengineering/tracker'
@@ -26,13 +27,19 @@
export let disabled: boolean = false
export let maxWidth: string | undefined = undefined

let element: HTMLSpanElement
const dispatch = createEventDispatcher()

$: presenters =
value !== undefined ? getClient().getHierarchy().findMixinMixins(value, view.mixin.ObjectPresenter) : []

afterUpdate(() => dispatch('resize', element?.clientWidth))
</script>

{#if value}
{#if value && presenters.length > 0}
<span
class="presenter-label select-text p-1"
bind:this={element}
class="presenter-label select-text"
class:with-margin={shouldUseMargin}
class:list={kind === 'list'}
style:max-width={maxWidth}
@@ -41,7 +48,7 @@
{#if presenters.length > 0}
<div class="flex-row-center">
{#each presenters as mixinPresenter}
<Component is={mixinPresenter.presenter} props={{ value }} />
<Component is={mixinPresenter.presenter} props={{ value, kind }} />
{/each}
</div>
{/if}
@@ -50,7 +57,6 @@

<style lang="scss">
.presenter-label {
overflow: hidden;
display: inline-flex;
align-items: center;
flex-shrink: 1;
@@ -25,7 +25,7 @@
DatePresenter,
deviceOptionsStore as deviceInfo
} from '@hcengineering/ui'
import { createEventDispatcher } from 'svelte'
import { createEventDispatcher, afterUpdate } from 'svelte'
import { activeMilestone } from '../../issues'
import tracker from '../../plugin'
import MilestoneSelector from './MilestoneSelector.svelte'
@@ -51,6 +51,8 @@
const client = getClient()
const dispatch = createEventDispatcher()

let element: HTMLDivElement

const handleMilestoneIdChanged = async (newMilestoneId: Ref<Milestone> | null | undefined) => {
if (!isEditable || newMilestoneId === undefined || (!Array.isArray(value) && value.milestone === newMilestoneId)) {
return
@@ -86,11 +88,12 @@
$: _space = space ?? (!Array.isArray(value) ? value.space : { $in: Array.from(new Set(value.map((it) => it.space))) })

$: twoRows = $deviceInfo.twoRows
afterUpdate(() => dispatch('resize', element?.clientWidth))
</script>

{#if kind === 'list'}
{#if !Array.isArray(value) && value.milestone}
<div class={compression ? 'label-wrapper' : 'clear-mins'}>
<div bind:this={element} class={compression ? 'label-wrapper' : 'clear-mins'}>
<MilestoneSelector
{kind}
{size}
@@ -112,6 +115,7 @@
{/if}
{:else}
<div
bind:this={element}
class="flex flex-wrap clear-mins"
class:minus-margin={kind === 'list-header'}
class:label-wrapper={compression}
@@ -60,11 +60,11 @@
>
<svelte:fragment slot="content">
{#if title}
<span class="caption-color overflow-label pointer-events-none">{title}</span>
<span class="label caption-color overflow-label pointer-events-none">{title}</span>
{:else if value}
<span class="caption-color overflow-label pointer-events-none">{value}</span>
<span class="label caption-color overflow-label pointer-events-none">{value}</span>
{:else}
<span class="content-dark-color pointer-events-none">
<span class="label content-dark-color pointer-events-none">
<Label label={placeholder} />
</span>
{/if}
@@ -41,7 +41,10 @@
}

function save () {
filter.value = search ? [search] : []
if (search == null || search === '') {
return
}
filter.value = [search]

onChange(filter)
dispatch('close')
@@ -61,5 +64,5 @@
on:change
/>
</div>
<Button shape="filter" label={view.string.Apply} on:click={save} />
<Button shape="filter" label={view.string.Apply} disabled={search === ''} on:click={save} />
</div>
@@ -84,6 +84,11 @@
onMount(() => {
dispatch('on-mount')
})
let minWidth: number | undefined = undefined
const sizes = new Map<number, number>()
const calcSizes = (): void => {
minWidth = sizes.size > 0 ? Array.from(sizes.values()).reduce((a, b) => a + b, 0) : undefined
}
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
@@ -147,14 +152,19 @@
{/if}
<GrowPresenter />
{#if !compactMode}
<div class="compression-bar">
{#each model.filter((p) => p.displayProps?.compression === true) as attrModel}
<div class="compression-bar" style:min-width={`${minWidth}px`}>
{#each model.filter((p) => p.displayProps?.compression === true) as attrModel, index}
<ListPresenter
{docObject}
attributeModel={attrModel}
props={getProps(props, $restrictionStore.readonly)}
value={getObjectValue(attrModel.key, docObject)}
onChange={getOnChange(docObject, attrModel)}
on:resize={(e) => {
if (e.detail == null) return
sizes.set(index, e.detail)
calcSizes()
}}
/>
{/each}
</div>
@@ -13,6 +13,7 @@
// limitations under the License.
-->
<script lang="ts">
import { createEventDispatcher } from 'svelte'
import core, { Doc } from '@hcengineering/core'
import { AttributeModel } from '@hcengineering/view'
import { FixedColumn } from '../..'
@@ -26,6 +27,8 @@
export let hideDivider: boolean = false
export let compactMode: boolean = false

const dispatch = createEventDispatcher()

$: dp = attributeModel?.displayProps

function joinProps (attribute: AttributeModel, object: Doc, props: Record<string, any>) {
@@ -35,6 +38,10 @@
}
return { object, ...clearAttributeProps, space: object.space, ...props }
}
const translateSize = (e: CustomEvent): void => {
if (e.detail === undefined) return
dispatch('resize', e.detail)
}
</script>

{#if dp?.dividerBefore === true && !hideDivider}
@@ -49,6 +56,7 @@
kind={'list'}
{compactMode}
{...joinProps(attributeModel, docObject, props)}
on:resize={translateSize}
/>
</FixedColumn>
{:else}
@@ -59,5 +67,6 @@
kind={'list'}
{compactMode}
{...joinProps(attributeModel, docObject, props)}
on:resize={translateSize}
/>
{/if}
@@ -1786,6 +1786,9 @@ async function createPersonAccount (
personAccountId
)
} else {
if (roleOrder[existingAccount.role] < roleOrder[role]) {
await ops.update(existingAccount, { role })
}
const person = await ops.findOne(contact.class.Person, { _id: existingAccount.person })
if (person === undefined) {
// Employee was deleted, let's restore it.
@@ -1794,10 +1797,16 @@ async function createPersonAccount (
await ops.updateDoc(contact.class.PersonAccount, existingAccount.space, existingAccount._id, {
person: employeeId
})
} else if (ops.getHierarchy().hasMixin(person, contact.mixin.Employee)) {
const employee = ops.getHierarchy().as(person, contact.mixin.Employee)
if (!employee.active) {
await ops.update(employee, {
} else if (shouldCreateEmployee) {
if (ops.getHierarchy().hasMixin(person, contact.mixin.Employee)) {
const employee = ops.getHierarchy().as(person, contact.mixin.Employee)
if (!employee.active) {
await ops.update(employee, {
active: true
})
}
} else {
await ops.createMixin(person._id, contact.class.Person, contact.space.Contacts, contact.mixin.Employee, {
active: true
})
}
@@ -71,8 +71,11 @@ export class FullTextIndex implements WithFind {
}

async close (): Promise<void> {
this.indexer.triggerIndexing()
if (!this.upgrade) {
await this.indexer.cancel()
} else {
await this.indexer.processUpload(this.indexer.metrics)
}
}

@@ -351,7 +351,9 @@ class TSessionManager implements SessionManager {
version: this.modelVersion,
workspaceVersion: versionToString(workspaceInfo.version),
workspace: workspaceInfo.workspaceId,
workspaceUrl: workspaceInfo.workspaceUrl
workspaceUrl: workspaceInfo.workspaceUrl,
email: token.email,
extra: JSON.stringify(token.extra ?? {})
})
// Version mismatch, return upgrading.
return { upgrade: true, upgradeInfo: workspaceInfo.upgrade }
@@ -14,9 +14,7 @@
//

import core, {
BackupClient,
Branding,
Client as CoreClient,
coreId,
DOMAIN_BENCHMARK,
DOMAIN_MIGRATION,
@@ -34,13 +32,13 @@ import core, {
TxOperations,
WorkspaceId,
WorkspaceIdWithUrl,
type Client,
type Doc,
type Ref,
type WithLookup
} from '@hcengineering/core'
import { consoleModelLogger, MigrateOperation, ModelLogger, tryMigrate } from '@hcengineering/model'
import { DomainIndexHelperImpl, Pipeline, StorageAdapter, type DbAdapter } from '@hcengineering/server-core'
import { connect } from './connect'
import { InitScript, WorkspaceInitializer } from './initializer'
import toolPlugin from './plugin'
import { MigrateClientImpl } from './upgrade'
@@ -165,23 +163,15 @@ export async function updateModel (
try {
let i = 0
for (const op of migrateOperations) {
logger.log('Migrate', { name: op[0] })
const st = Date.now()
await op[1].upgrade(migrateState, async () => connection as any, logger)
const tdelta = Date.now() - st
if (tdelta > 0) {
logger.log('Create', { name: op[0], time: tdelta })
}
i++
await progress((((100 / migrateOperations.length) * i) / 100) * 30)
await progress((((100 / migrateOperations.length) * i) / 100) * 100)
}

// Create update indexes
await createUpdateIndexes(
ctx,
connection.getHierarchy(),
connection.getModel(),
pipeline,
async (value) => {
await progress(30 + (Math.min(value, 100) / 100) * 70)
},
workspaceId
)
await progress(100)
} catch (e: any) {
logger.error('error', { error: e })
@@ -203,6 +193,7 @@ export async function initializeWorkspace (
): Promise<void> {
const initWS = branding?.initWorkspace ?? getMetadata(toolPlugin.metadata.InitWorkspace)
const scriptUrl = getMetadata(toolPlugin.metadata.InitScriptURL)
ctx.info('Init script details', { scriptUrl, initWS })
if (initWS === undefined || scriptUrl === undefined) return
try {
// `https://raw.githubusercontent.com/hcengineering/init/main/script.yaml`
@@ -237,11 +228,12 @@ export async function upgradeModel (
workspaceId: WorkspaceIdWithUrl,
txes: Tx[],
pipeline: Pipeline,
connection: Client,
storageAdapter: StorageAdapter,
migrateOperations: [string, MigrateOperation][],
logger: ModelLogger = consoleModelLogger,
progress: (value: number) => Promise<void>,
forceIndexes: boolean = false
updateIndexes: 'perform' | 'skip' | 'disable' = 'skip'
): Promise<Tx[]> {
if (txes.some((tx) => tx.objectSpace !== core.space.Model)) {
throw Error('Model txes must target only core.space.Model')
@@ -308,87 +300,69 @@ export async function upgradeModel (
workspaceId
)
}
if (forceIndexes) {
if (updateIndexes === 'perform') {
await upgradeIndexes()
}

await ctx.with('migrate', {}, async (ctx) => {
let i = 0
for (const op of migrateOperations) {
const t = Date.now()
try {
const t = Date.now()
await ctx.with(op[0], {}, async () => {
await op[1].migrate(migrateClient, logger)
})
const tdelta = Date.now() - t
if (tdelta > 0) {
logger.log('migrate:', { workspaceId: workspaceId.name, operation: op[0], time: Date.now() - t })
}
} catch (err: any) {
logger.error(`error during migrate: ${op[0]} ${err.message}`, err)
throw err
}
logger.log('migrate:', { workspaceId: workspaceId.name, operation: op[0], time: Date.now() - t })
await progress(20 + ((100 / migrateOperations.length) * i * 20) / 100)
i++
}

await tryMigrate(migrateClient, coreId, [
{
state: 'indexes-v5',
func: upgradeIndexes
}
])
if (updateIndexes === 'skip') {
await tryMigrate(migrateClient, coreId, [
{
state: 'indexes-v5',
func: upgradeIndexes
}
])
}
})

logger.log('Apply upgrade operations', { workspaceId: workspaceId.name })

let connection: (CoreClient & BackupClient) | undefined
const getUpgradeClient = async (): Promise<CoreClient & BackupClient> =>
await ctx.with('connect-platform', {}, async (ctx) => {
if (connection !== undefined) {
return connection
}
connection = (await connect(
transactorUrl,
workspaceId,
undefined,
{
mode: 'backup',
model: 'upgrade',
admin: 'true'
},
model
)) as CoreClient & BackupClient
return connection
})
try {
await ctx.with('upgrade', {}, async (ctx) => {
let i = 0
for (const op of migrateOperations) {
const t = Date.now()
await ctx.with(op[0], {}, () => op[1].upgrade(migrateState, getUpgradeClient, logger))
logger.log('upgrade:', { operation: op[0], time: Date.now() - t, workspaceId: workspaceId.name })
await progress(60 + ((100 / migrateOperations.length) * i * 30) / 100)
i++
}
})

if (connection === undefined) {
// We need to send reboot for workspace
ctx.info('send force close', { workspace: workspaceId.name, transactorUrl })
const serverEndpoint = transactorUrl.replaceAll('wss://', 'https://').replace('ws://', 'http://')
const token = generateToken(systemAccountEmail, workspaceId, { admin: 'true' })
try {
await fetch(
serverEndpoint + `/api/v1/manage?token=${token}&operation=force-close&wsId=${toWorkspaceString(workspaceId)}`,
{
method: 'PUT'
}
)
} catch (err: any) {
// Ignore error if transactor is not yet ready
await ctx.with('upgrade', {}, async (ctx) => {
let i = 0
for (const op of migrateOperations) {
const t = Date.now()
await ctx.with(op[0], {}, () => op[1].upgrade(migrateState, async () => connection, logger))
const tdelta = Date.now() - t
if (tdelta > 0) {
logger.log('upgrade:', { operation: op[0], time: tdelta, workspaceId: workspaceId.name })
}
await progress(60 + ((100 / migrateOperations.length) * i * 30) / 100)
i++
}
} finally {
await connection?.sendForceClose()
await connection?.close()
})

// We need to send reboot for workspace
ctx.info('send force close', { workspace: workspaceId.name, transactorUrl })
const serverEndpoint = transactorUrl.replaceAll('wss://', 'https://').replace('ws://', 'http://')
const token = generateToken(systemAccountEmail, workspaceId, { admin: 'true' })
try {
await fetch(
serverEndpoint + `/api/v1/manage?token=${token}&operation=force-close&wsId=${toWorkspaceString(workspaceId)}`,
{
method: 'PUT'
}
)
} catch (err: any) {
// Ignore error if transactor is not yet ready
}
return model
}
@@ -407,7 +381,13 @@ async function prepareMigrationClient (
const migrateClient = new MigrateClientImpl(pipeline, hierarchy, model, logger, storageAdapter, workspaceId)
const states = await migrateClient.find<MigrationState>(DOMAIN_MIGRATION, { _class: core.class.MigrationState })
const sts = Array.from(groupByArray(states, (it) => it.plugin).entries())
const migrateState = new Map(sts.map((it) => [it[0], new Set(it[1].map((q) => q.state))]))

const _toSet = (vals: WithLookup<MigrationState>[]): Set<string> => {
return new Set(vals.map((q) => q.state))
}

const migrateState = new Map<string, Set<string>>(sts.map((it) => [it[0], _toSet(it[1])]))
// const migrateState = new Map(sts.map((it) => [it[0], new Set(it[1].map((q) => q.state))]))
migrateClient.migrateState = migrateState

return { migrateClient, migrateState }
@@ -90,6 +90,8 @@ export function serveWorkspaceAccount (

setMetadata(serverNotification.metadata.InboxOnlyNotifications, true)

let canceled = false

const worker = new WorkspaceWorker(
version,
txes,
@@ -100,17 +102,22 @@ export function serveWorkspaceAccount (
brandings
)

void worker.start(measureCtx, {
errorHandler: async (ws, err) => {
Analytics.handleError(err)
void worker.start(
measureCtx,
{
errorHandler: async (ws, err) => {
Analytics.handleError(err)
},
force: false,
console: false,
logs: 'upgrade-logs',
waitTimeout
},
force: false,
console: false,
logs: 'upgrade-logs',
waitTimeout
})
() => canceled
)

const close = (): void => {
canceled = true
onClose?.()
}

@@ -79,7 +79,7 @@ export class WorkspaceWorker {
wakeup: () => void = () => {}
defaultWakeup: () => void = () => {}

async start (ctx: MeasureContext, opt: WorkspaceOptions): Promise<void> {
async start (ctx: MeasureContext, opt: WorkspaceOptions, isCanceled: () => boolean): Promise<void> {
this.defaultWakeup = () => {
ctx.info("I'm busy", { version: this.version, region: this.region })
}
@@ -92,7 +92,7 @@ export class WorkspaceWorker {

ctx.info('Successfully connected to the account service')

while (true) {
while (!isCanceled()) {
await this.waitForAvailableThread()

const workspace = await ctx.with('get-pending-workspace', {}, async (ctx) => {
@@ -131,23 +131,20 @@ export async function createWorkspace (
usePassedCtx: true
})
const txAdapter = await txFactory(ctx, hierarchy, dbUrl, wsId, modelDb, storageAdapter)

await childLogger.withLog('init-workspace', {}, async (ctx) => {
await initModel(ctx, wsId, txes, txAdapter, storageAdapter, ctxModellogger, async (value) => {
await handleWsEvent?.('progress', version, 10 + Math.round((Math.min(value, 100) / 100) * 10))
})
await initModel(ctx, wsId, txes, txAdapter, storageAdapter, ctxModellogger, async (value) => {})
})

const client = new TxOperations(wrapPipeline(ctx, pipeline, wsUrl), core.account.ConfigUser)

await updateModel(ctx, wsId, migrationOperation, client, pipeline, ctxModellogger, async (value) => {
await handleWsEvent?.('progress', version, 20 + Math.round((Math.min(value, 100) / 100) * 10))
await handleWsEvent?.('progress', version, 10 + Math.round((Math.min(value, 100) / 100) * 10))
})

ctx.info('Starting init script if any')
await initializeWorkspace(ctx, branding, wsUrl, storageAdapter, client, ctxModellogger, async (value) => {
ctx.info('Init script progress', { value })
await handleWsEvent?.('progress', version, 30 + Math.round((Math.min(value, 100) / 100) * 60))
await handleWsEvent?.('progress', version, 20 + Math.round((Math.min(value, 100) / 100) * 60))
})

await upgradeWorkspaceWith(
@@ -157,14 +154,15 @@ export async function createWorkspace (
migrationOperation,
workspaceInfo,
pipeline,
client,
storageAdapter,
ctxModellogger,
async (event, version, value) => {
ctx.info('Init script progress', { event, value })
await handleWsEvent?.('progress', version, 90 + Math.round((Math.min(value, 100) / 100) * 10))
await handleWsEvent?.('progress', version, 80 + Math.round((Math.min(value, 100) / 100) * 20))
},
false,
false
'disable'
)

await handleWsEvent?.('create-done', version, 100, '')
@@ -216,6 +214,12 @@ export async function upgradeWorkspace (
return
}

const wsUrl: WorkspaceIdWithUrl = {
name: ws.workspace,
workspaceName: ws.workspaceName ?? '',
workspaceUrl: ws.workspaceUrl ?? ''
}

await upgradeWorkspaceWith(
ctx,
version,
@@ -223,11 +227,12 @@ export async function upgradeWorkspace (
migrationOperation,
ws,
pipeline,
wrapPipeline(ctx, pipeline, wsUrl),
storageAdapter,
logger,
handleWsEvent,
forceUpdate,
forceIndexes,
forceIndexes ? 'perform' : 'skip',
external
)
} finally {
@@ -246,6 +251,7 @@ export async function upgradeWorkspaceWith (
migrationOperation: [string, MigrateOperation][],
ws: BaseWorkspaceInfo,
pipeline: Pipeline,
connection: Client,
storageAdapter: StorageAdapter,
logger: ModelLogger = consoleModelLogger,
handleWsEvent?: (
@@ -255,7 +261,7 @@ export async function upgradeWorkspaceWith (
message?: string
) => Promise<void>,
forceUpdate: boolean = true,
forceIndexes: boolean = false,
updateIndexes: 'perform' | 'skip' | 'disable' = 'skip',
external: boolean = false
): Promise<void> {
const versionStr = versionToString(version)
@@ -310,13 +316,14 @@ export async function upgradeWorkspaceWith (
wsId,
txes,
pipeline,
connection,
storageAdapter,
migrationOperation,
logger,
async (value) => {
progress = value
},
forceIndexes
updateIndexes
)

await handleWsEvent?.('upgrade-done', version, 100, '')
@@ -2,11 +2,13 @@
import { Issue } from '@hcengineering/tracker'

import { getClient } from '@hcengineering/presentation'
import type { ButtonKind } from '@hcengineering/ui'
import { HyperlinkEditor } from '@hcengineering/view-resources'
import github from '../../plugin'
import { integrationRepositories } from '../utils'

export let value: Issue
export let kind: ButtonKind = 'ghost'

$: ghIssue = getClient().getHierarchy().asIf(value, github.mixin.GithubIssue)

@@ -14,14 +16,12 @@
</script>

{#if ghIssue !== undefined && ghIssue.url !== '' && repository !== undefined}
<div class="flex flex-row-center">
<HyperlinkEditor
readonly
icon={github.icon.Github}
kind={'ghost'}
value={ghIssue.url}
placeholder={github.string.Issue}
title={`${repository.name}`}
/>
</div>
<HyperlinkEditor
readonly
icon={github.icon.Github}
{kind}
value={ghIssue.url}
placeholder={github.string.Issue}
title={`${repository.name}`}
/>
{/if}
@@ -271,7 +271,6 @@ async function processMigrateMarkupFor (
client: MigrationClient,
iterator: MigrationIterator<DocSyncInfo>
): Promise<void> {
let processed = 0
while (true) {
const docs = await iterator.next(1000)
if (docs === null || docs.length === 0) {
@@ -298,9 +297,6 @@ async function processMigrateMarkupFor (
if (operations.length > 0) {
await client.bulk(DOMAIN_GITHUB, operations)
}

processed += docs.length
console.log('...processed', processed)
}
}

@@ -162,10 +162,15 @@ export class PlatformWorker {
errors = true
}
await new Promise<void>((resolve) => {
this.triggerCheckWorkspaces = resolve
this.ctx.info('Workspaces check triggered')
this.triggerCheckWorkspaces = () => {
this.ctx.info('Workspaces check triggered')
this.triggerCheckWorkspaces = () => {}
resolve()
}
if (errors) {
setTimeout(resolve, 5000)
setTimeout(() => {
this.triggerCheckWorkspaces()
}, 5000)
}
})
}
@@ -650,6 +655,9 @@ export class PlatformWorker {
}

private async checkWorkspaces (): Promise<boolean> {
this.ctx.info('************************* Check workspaces ************************* ', {
workspaces: this.clients.size
})
let workspaces = await this.getWorkspaces()
if (process.env.GITHUB_USE_WS !== undefined) {
workspaces = [process.env.GITHUB_USE_WS]
@@ -660,7 +668,14 @@ export class PlatformWorker {
let errors = 0
let idx = 0
const connecting = new Map<string, number>()
const st = Date.now()
const connectingInfo = setInterval(() => {
this.ctx.info('****** connecting to workspaces ******', {
connecting: connecting.size,
time: Date.now() - st,
workspaces: workspaces.length,
queue: rateLimiter.processingQueue.size
})
for (const [c, d] of connecting.entries()) {
this.ctx.info('connecting to workspace', { workspace: c, time: Date.now() - d })
}
@@ -727,7 +742,7 @@ export class PlatformWorker {
}
)
if (worker !== undefined) {
workerCtx.info('Register worker Done', {
workerCtx.info('************************* Register worker Done ************************* ', {
workspaceId: workspaceInfo.workspaceId,
workspace: workspaceInfo.workspace,
index: widx,
@@ -736,12 +751,15 @@ export class PlatformWorker {
// No if no integration, we will try connect one more time in a time period
this.clients.set(workspace, worker)
} else {
workerCtx.info('Failed Register worker, timeout or integrations removed', {
workspaceId: workspaceInfo.workspaceId,
workspace: workspaceInfo.workspace,
index: widx,
total: workspaces.length
})
workerCtx.info(
'************************* Failed Register worker, timeout or integrations removed *************************',
{
workspaceId: workspaceInfo.workspaceId,
workspace: workspaceInfo.workspace,
index: widx,
total: workspaces.length
}
)
errors++
}
} catch (e: any) {
@@ -754,6 +772,10 @@ export class PlatformWorker {
}
})
}
this.ctx.info('************************* Waiting To complete Workspace processing ************************* ', {
workspaces: this.clients.size,
rateLimiter: rateLimiter.processingQueue.size
})
try {
await rateLimiter.waitProcessing()
} catch (e: any) {
@@ -761,6 +783,11 @@ export class PlatformWorker {
errors++
}
clearInterval(connectingInfo)

this.ctx.info('************************* Check close deleted ************************* ', {
workspaces: this.clients.size,
deleted: toDelete.size
})
// Close deleted workspaces
for (const deleted of Array.from(toDelete.keys())) {
const ws = this.clients.get(deleted)
@@ -768,7 +795,7 @@ export class PlatformWorker {
try {
this.ctx.info('workspace removed from tracking list', { workspace: deleted })
this.clients.delete(deleted)
await ws.close()
void ws.close()
} catch (err: any) {
Analytics.handleError(err)
errors++
@@ -127,8 +127,9 @@ export class GithubWorker implements IntegrationManager {
this.closing = true
this.ctx.warn('Closing', { workspace: this.workspace.name })
this.triggerSync()
await this.syncPromise
this.ctx.warn('ClosingDone', { workspace: this.workspace.name })
await Promise.all([await this.syncPromise, new Promise<void>((resolve) => setTimeout(resolve, 5000))])

this.ctx.warn('Closing Done', { workspace: this.workspace.name })
await this.client.close()
}

@@ -3,12 +3,10 @@ CREATE SCHEMA IF NOT EXISTS blob;

DROP TABLE IF EXISTS blob.blob;
DROP TABLE IF EXISTS blob.data;
DROP TYPE IF EXISTS blob.content_type;
DROP TYPE IF EXISTS blob.location;

-- B L O B

CREATE TYPE blob.content_type AS ENUM ('application','audio','font','image','model','text','video');
CREATE TYPE blob.location AS ENUM ('kv', 'weur', 'eeur', 'wnam', 'enam', 'apac');

\echo "Creating blob.data..."
@@ -17,8 +15,7 @@ CREATE TABLE blob.data (
location blob.location NOT NULL,
size INT8 NOT NULL,
filename UUID NOT NULL,
type blob.content_type NOT NULL,
subtype STRING(64) NOT NULL,
type STRING(255) NOT NULL,
CONSTRAINT pk_data PRIMARY KEY (hash, location)
);

@@ -163,7 +163,6 @@ async function saveBlob (
const { location, bucket } = selectStorage(env, workspace)

const size = file.size
const [mimetype, subtype] = type.split('/')
const httpMetadata = { contentType: type, cacheControl }
const filename = getUniqueFilename()

@@ -179,7 +178,7 @@ async function saveBlob (
} else {
await bucket.put(filename, file, { httpMetadata })
await sql.begin((sql) => [
db.createData(sql, { hash, location, filename, type: mimetype, subtype, size }),
db.createData(sql, { hash, location, filename, type, size }),
db.createBlob(sql, { workspace, name, hash, location })
])
}
@@ -201,7 +200,7 @@ async function saveBlob (
} else {
// Otherwise register a new hash and blob
await sql.begin((sql) => [
db.createData(sql, { hash, location, filename, type: mimetype, subtype, size }),
db.createData(sql, { hash, location, filename, type, size }),
db.createBlob(sql, { workspace, name, hash, location })
])
}
@@ -227,9 +226,8 @@ export async function handleBlobUploaded (env: Env, workspace: string, name: str
} else {
const size = object.size
const type = object.httpMetadata.contentType ?? 'application/octet-stream'
const [mimetype, subtype] = type.split('/')

await db.createData(sql, { hash, location, filename, type: mimetype, subtype, size })
await db.createData(sql, { hash, location, filename, type, size })
await db.createBlob(sql, { workspace, name, hash, location })
}
}
@@ -25,7 +25,6 @@ export interface BlobDataRecord extends BlobDataId {
filename: UUID
size: number
type: string
subtype: string
}

export interface BlobId {
@@ -47,7 +46,7 @@ export async function getData (sql: postgres.Sql, dataId: BlobDataId): Promise<B
const { hash, location } = dataId

const rows = await sql<BlobDataRecord[]>`
SELECT hash, location, filename, size, type, subtype
SELECT hash, location, filename, size, type
FROM blob.data
WHERE hash = ${hash} AND location = ${location}
`
@@ -60,11 +59,11 @@ export async function getData (sql: postgres.Sql, dataId: BlobDataId): Promise<B
}

export async function createData (sql: postgres.Sql, data: BlobDataRecord): Promise<void> {
const { hash, location, filename, size, type, subtype } = data
const { hash, location, filename, size, type } = data

await sql`
UPSERT INTO blob.data (hash, location, filename, size, type, subtype)
VALUES (${hash}, ${location}, ${filename}, ${size}, ${type}, ${subtype})
UPSERT INTO blob.data (hash, location, filename, size, type)
VALUES (${hash}, ${location}, ${filename}, ${size}, ${type})
`
}