UBERF-7665: Fix OOM in sharp (#6138)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Author: Andrey Sobolev
Date: 2024-07-25 18:51:53 +07:00
Committer: Andrey Sobolev
Parent: 2f1e6e56a2
Commit: 30d9d634c7
3 changed files with 69 additions and 41 deletions

File 1: preview config parser

@@ -52,11 +52,7 @@ export function parsePreviewConfig (config?: string): PreviewConfig | undefined
     if (c === '') {
       continue // Skip empty lines
     }
-    const vars = c.split('|')
-    let [provider, url, formats, contentTypes] = c.split('|').map((it) => it.trim())
-    if (vars.length === 3) {
-      contentTypes = formats // Backward compatibility, since formats are obsolete
-    }
+    const [provider, url, contentTypes] = c.split('|').map((it) => it.trim())
     const p: ProviderPreviewConfig = {
       providerId: provider,
       previewUrl: url,
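After this change each config entry is a plain provider|url|contentTypes triple: the obsolete formats column, and the shim that remapped it, are removed outright. A minimal sketch of the new parsing step (the entry value is an illustrative assumption, not taken from this repository):

// Hypothetical entry in the preview config string: provider|url|contentTypes
const c = 'image-proxy | https://preview.example.com/image | image/*'

const [provider, url, contentTypes] = c.split('|').map((it) => it.trim())
// provider === 'image-proxy', url === 'https://preview.example.com/image', contentTypes === 'image/*'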

File 2: image preview component (Svelte) — the srcset of pre-generated preview variants is dropped, so the browser fetches a single preview size per image.

@@ -44,7 +44,6 @@
   style:max-width={width}
   style:max-height={height}
   src={blobRef.src}
-  srcset={blobRef.srcset}
   alt={name}
   style:height={loading ? '0' : ''}
 />

File 3: front file server

@@ -31,7 +31,9 @@
 import sharp from 'sharp'
 import { v4 as uuid } from 'uuid'
 import { preConditions } from './utils'
-import fs from 'fs'
+import fs, { createReadStream, mkdtempSync } from 'fs'
+import { rm, writeFile } from 'fs/promises'
+import { tmpdir } from 'os'

 const cacheControlValue = 'public, max-age=365d'
 const cacheControlNoCache = 'public, no-store, no-cache, must-revalidate, max-age=0'
@@ -126,12 +128,11 @@ async function getFileRange (
       dataStream.on('error', (err) => {
         ctx.error('error receive stream', { workspace: workspace.name, uuid, error: err })
         Analytics.handleError(err)
+        res.end()
+        dataStream.destroy()
         reject(err)
       })
-      dataStream.on('close', () => {
-        res.end()
-      })
     })
   } catch (err: any) {
     if (err?.code === 'NoSuchKey' || err?.code === 'NotFound') {
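Both stream error handlers in this file now follow the same discipline: finish the response and destroy the source stream inside the 'error' handler itself, rather than waiting for a 'close' event that may never arrive once the adapter stream has failed. A sketch of the pattern, assuming a generic Readable piped to an Express Response:

import { type Readable } from 'stream'
import { type Response } from 'express'

async function send (dataStream: Readable, res: Response): Promise<void> {
  await new Promise<void>((resolve, reject) => {
    dataStream.pipe(res)
    dataStream.on('end', () => {
      resolve()
    })
    dataStream.on('error', (err) => {
      res.end() // finish the HTTP response so the socket is released
      dataStream.destroy() // tear the source down now; do not wait for 'close'
      reject(err)
    })
  })
}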
@@ -169,7 +170,8 @@ async function getFile (
         'content-type': stat.contentType,
         etag: stat.etag,
         'last-modified': new Date(stat.modifiedOn).toISOString(),
-        'cache-control': cacheControlValue
+        'cache-control': cacheControlValue,
+        Connection: 'keep-alive'
       })
       res.end()
       return
@@ -180,7 +182,8 @@ async function getFile (
         'content-type': stat.contentType,
         etag: stat.etag,
         'last-modified': new Date(stat.modifiedOn).toISOString(),
-        'cache-control': cacheControlValue
+        'cache-control': cacheControlValue,
+        Connection: 'keep-alive'
       })
       res.end()
       return
@@ -196,7 +199,8 @@ async function getFile (
         'Content-Type': stat.contentType,
         Etag: stat.etag,
         'Last-Modified': new Date(stat.modifiedOn).toISOString(),
-        'Cache-Control': cacheControlValue
+        'Cache-Control': cacheControlValue,
+        Connection: 'keep-alive'
       })

       dataStream.pipe(res)
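All three response paths in getFile now advertise Connection: keep-alive next to the caching headers, so a client fetching many blobs can reuse one socket instead of reconnecting per file. The shared shape, as a sketch around an Express Response (header set abridged; the max-age here is spelled in seconds):

import { type Response } from 'express'

const cacheControlValue = 'public, max-age=31536000' // one year, in seconds

function headBlobResponse (res: Response, etag: string, contentType: string): void {
  res.writeHead(200, {
    'content-type': contentType,
    etag,
    'cache-control': cacheControlValue,
    Connection: 'keep-alive' // invite the client to reuse this socket
  })
}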
@@ -209,6 +213,9 @@ async function getFile (
       dataStream.on('error', function (err) {
         Analytics.handleError(err)
         ctx.error('error', { err })
+        res.end()
+        dataStream.destroy()
         reject(err)
       })
     })
@@ -249,17 +256,21 @@ export function start (
 ): () => void {
   const app = express()

+  const tempFileDir = mkdtempSync(join(tmpdir(), 'front-'))
+  let tempFileIndex = 0
+
   app.use(cors())
   app.use(
     fileUpload({
-      useTempFiles: true
+      useTempFiles: true,
+      tempFileDir
     })
   )
   app.use(bp.json())
   app.use(bp.urlencoded({ extended: true }))

   const childLogger = ctx.logger.childLogger?.('requests', {
-    enableConsole: 'false'
+    enableConsole: 'true'
   })

   const requests = ctx.newChild('requests', {}, {}, childLogger)
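Uploads and generated previews now share a single per-process scratch directory instead of buffering in memory, and preview temp files get monotonically increasing names. The same pattern in isolation (a sketch; 'front-' matches the prefix used above):

import { mkdtempSync } from 'fs'
import { tmpdir } from 'os'
import { join } from 'path'

// One unique scratch directory per process, e.g. /tmp/front-a1B2c3
const tempFileDir = mkdtempSync(join(tmpdir(), 'front-'))
let tempFileIndex = 0

// Factory passed down to the preview generator: each call yields a fresh path
const tempFile = (): string => join(tempFileDir, `${++tempFileIndex}`)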
@@ -383,7 +394,7 @@ export function start (
         )
         if (blobInfo === undefined) {
-          ctx.error('No such key', { file: uuid, workspace: payload.workspace })
+          ctx.error('No such key', { file: uuid, workspace: payload.workspace.name })
           res.status(404).send()
           return
         }
@@ -417,12 +428,14 @@ export function start (
         const size = req.query.size !== undefined ? parseInt(req.query.size as string) : undefined
         const accept = req.headers.accept
-        if (accept !== undefined && isImage && blobInfo.contentType !== 'image/gif') {
+        if (accept !== undefined && isImage && blobInfo.contentType !== 'image/gif' && size !== undefined) {
           blobInfo = await ctx.with(
             'resize',
             {},
             async (ctx) =>
-              await getGeneratePreview(ctx, blobInfo as PlatformBlob, size ?? -1, uuid, config, payload, accept)
+              await getGeneratePreview(ctx, blobInfo as PlatformBlob, size, uuid, config, payload, accept, () =>
+                join(tempFileDir, `${++tempFileIndex}`)
+              )
           )
         }
@@ -746,7 +759,8 @@ async function getGeneratePreview (
   uuid: string,
   config: { storageAdapter: StorageAdapter },
   payload: Token,
-  accept: string
+  accept: string,
+  tempFile: () => string
 ): Promise<PlatformBlob> {
   if (size === undefined) {
     return blob
@@ -769,6 +783,14 @@ async function getGeneratePreview (
     return blob
   }

+  if (size === -1) {
+    size = 2048
+  }
+
+  if (size > 2048) {
+    size = 2048
+  }
+
   const sizeId = uuid + `%preview%${size}${format !== 'jpeg' ? format : ''}`
   const d = await config.storageAdapter.stat(ctx, payload.workspace, sizeId)
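The guards above replace the old size === -1 special case: -1 now means "largest preview", and anything beyond 2048 pixels is clamped, which bounds the work sharp can be asked to do per request. The two ifs are equivalent to this one-liner (illustration only):

size = Math.min(size === -1 ? 2048 : size, 2048)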
@@ -778,54 +800,61 @@ async function getGeneratePreview (
     // We have cached small document, let's proceed with it.
     return d
   } else {
-    let data: Buffer
+    const files: string[] = []
     try {
       // Let's get data and resize it
-      data = Buffer.concat(await config.storageAdapter.read(ctx, payload.workspace, uuid))
+      const fname = tempFile()
+      files.push(fname)
+      await writeFile(fname, await config.storageAdapter.get(ctx, payload.workspace, uuid))

-      let pipeline = sharp(data)
+      let pipeline = sharp(fname)
       sharp.cache(false)

-      // const metadata = await pipeline.metadata()
-
-      if (size !== -1) {
-        pipeline = pipeline.resize({
-          width: size,
-          fit: 'cover',
-          withoutEnlargement: true
-        })
-      }
+      pipeline = pipeline.resize({
+        width: size,
+        fit: 'cover',
+        withoutEnlargement: true
+      })

       let contentType = 'image/jpeg'
       switch (format) {
         case 'jpeg':
-          pipeline = pipeline.jpeg({})
+          pipeline = pipeline.jpeg({
+            progressive: true
+          })
           contentType = 'image/jpeg'
           break
         case 'avif':
           pipeline = pipeline.avif({
-            quality: size !== undefined && size < 128 ? undefined : 85
+            lossless: false,
+            effort: 0
           })
           contentType = 'image/avif'
           break
         case 'heif':
           pipeline = pipeline.heif({
-            quality: size !== undefined && size < 128 ? undefined : 80
+            effort: 0
           })
           contentType = 'image/heif'
           break
         case 'webp':
-          pipeline = pipeline.webp()
+          pipeline = pipeline.webp({
+            effort: 0
+          })
           contentType = 'image/webp'
           break
         case 'png':
-          pipeline = pipeline.png()
+          pipeline = pipeline.png({
+            effort: 0
+          })
           contentType = 'image/png'
           break
       }

-      const dataBuff = await pipeline.toBuffer()
+      const outFile = tempFile()
+      files.push(outFile)
+
+      const dataBuff = await ctx.with('resize', { contentType }, async () => await pipeline.toFile(outFile))
       pipeline.destroy()

       // Add support of avif as well.
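This hunk is the core of the OOM fix. Previously the whole blob was concatenated into a Buffer and sharp produced another Buffer via toBuffer(), so peak memory grew with image size and with the number of concurrent requests; now the source is written to a temp file, sharp reads and writes disk paths, its global cache is disabled, and the cheap effort: 0 encoders keep per-preview CPU low. A condensed disk-to-disk sketch (the 512px width and the webp format are illustrative):

import sharp from 'sharp'

async function resizeOnDisk (input: string, output: string): Promise<number> {
  sharp.cache(false) // do not retain decoded images in sharp's global cache
  const info = await sharp(input)
    .resize({ width: 512, fit: 'cover', withoutEnlargement: true })
    .webp({ effort: 0 }) // fastest encode; trades compression ratio for speed
    .toFile(output) // result is streamed to disk, never held as one Buffer
  return info.size // OutputInfo.size is the byte length of the written file
}

Note that toFile() resolves to sharp's OutputInfo, which is why the next hunk reads dataBuff.size instead of dataBuff.length and uploads the result with createReadStream(outFile).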
@@ -833,14 +862,14 @@ async function getGeneratePreview (
         ctx,
         payload.workspace,
         sizeId,
-        dataBuff,
+        createReadStream(outFile),
         contentType,
-        dataBuff.length
+        dataBuff.size
       )
       return {
         ...blob,
         _id: sizeId as Ref<PlatformBlob>,
-        size: dataBuff.length,
+        size: dataBuff.size,
         contentType,
         etag: upload.etag,
         storageId: sizeId
@@ -858,6 +887,10 @@ async function getGeneratePreview (
       // Return original in case of error
       return blob
+    } finally {
+      for (const f of files) {
+        await rm(f)
+      }
     }
   }
 }
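The new finally block guarantees both scratch files are removed whether the resize succeeds, throws, or falls back to the original blob. A reusable sketch of the same guard (the force option is an assumption; the commit calls plain rm(f)):

import { rm } from 'fs/promises'

async function withTempFiles<T> (body: (track: (f: string) => void) => Promise<T>): Promise<T> {
  const files: string[] = []
  try {
    return await body((f) => files.push(f))
  } finally {
    for (const f of files) {
      await rm(f, { force: true }) // force: do not throw if the file was never created
    }
  }
}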