Repository: https://github.com/ragestudio/comty.git

Replace image and video compression with unified optimize handler

commit 56c9ee7b19
parent 4f82ef46d0
@@ -1,43 +0,0 @@
-import fs from "node:fs"
-import path from "node:path"
-
-import Sharp from "sharp"
-
-const imageProcessingConf = {
-    sizeThreshold: 10 * 1024 * 1024,
-    imageQuality: 80,
-}
-
-const imageTypeToConfig = {
-    png: {
-        compressionLevel: Math.floor(imageProcessingConf.imageQuality / 100),
-    },
-    default: {
-        quality: imageProcessingConf.imageQuality,
-    },
-}
-
-export default async ({ filePath, workPath }) => {
-    const stat = await fs.promises.stat(file.filepath)
-
-    // ignore if too small
-    if (stat.size < imageProcessingConf.sizeThreshold) {
-        return file
-    }
-
-    let image = await Sharp(filePath)
-
-    const { format } = await image.metadata()
-
-    image = await image[format](
-        imageTypeToConfig[format] ?? imageTypeToConfig.default,
-    ).withMetadata()
-
-    filePath = path.resolve(workPath, `${path.basename(filePath)}_ff`)
-
-    await image.toFile(outputFilepath)
-
-    return {
-        filePath: filePath,
-    }
-}
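Note that the removed handler referenced identifiers that are never defined in the file (file.filepath, return file, and outputFilepath). A corrected fragment of what the size guard and final write appear to have intended is sketched below for comparison with the new handler; this is an assumption, not code from the repository.

    // sketch (assumed intent): stat the incoming file, skip small files, write the re-encoded copy
    const stat = await fs.promises.stat(filePath)

    // ignore if too small
    if (stat.size < imageProcessingConf.sizeThreshold) {
        return { filePath }
    }

    // ...re-encode with Sharp as above...

    const outputFilepath = path.resolve(workPath, `${path.basename(filePath)}_ff`)
    await image.toFile(outputFilepath)

    return { filePath: outputFilepath }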
packages/server/classes/Transformation/handlers/optimize.js (new file, 63 lines)
@@ -0,0 +1,63 @@
+import fs from "node:fs"
+import path from "node:path"
+import { fileTypeFromBuffer } from "file-type"
+
+import readChunk from "@shared-utils/readChunk"
+
+import Sharp from "sharp"
+
+const thresholds = {
+    size: 10 * 1024 * 1024,
+}
+
+const sharpConfigs = {
+    png: {
+        compressionLevel: 6,
+        //quality: 80,
+    },
+    jpeg: {
+        quality: 80,
+        mozjpeg: true,
+    },
+    default: {
+        quality: 80,
+    },
+}
+
+export default async ({ filePath, workPath, onProgress }) => {
+    const stat = await fs.promises.stat(filePath)
+
+    const firstBuffer = await readChunk(filePath, {
+        length: 4100,
+    })
+    const fileType = await fileTypeFromBuffer(firstBuffer)
+
+    // first check if size over threshold
+    if (stat.size < thresholds.size) {
+        return {
+            outputFile: filePath,
+        }
+    }
+
+    // get the type of the file mime
+    const type = fileType.mime.split("/")[0]
+
+    switch (type) {
+        case "image": {
+            let image = Sharp(filePath)
+
+            const metadata = await image.metadata()
+            const config = sharpConfigs[metadata.format] ?? sharpConfigs.default
+
+            image = await image[metadata.format](config).withMetadata()
+
+            filePath = path.resolve(workPath, `${path.basename(filePath)}_ff`)
+
+            await image.toFile(filePath)
+        }
+    }
+
+    return {
+        outputFile: filePath,
+    }
+}
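A minimal usage sketch of the new handler, assuming it is imported directly; the paths below are hypothetical, and only the filePath/workPath fields from the signature above are used.

    import optimize from "./handlers/optimize"

    // hypothetical paths; workPath is the scratch directory the pipeline provides
    const { outputFile } = await optimize({
        filePath: "/tmp/uploads/photo.png",
        workPath: "/tmp/uploads/work",
    })

    // outputFile is the original path for small or non-image files,
    // otherwise the re-encoded "<name>_ff" copy inside workPath
    console.log(outputFile)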
@@ -1,6 +0,0 @@
-export default async ({ filePath, workPath }) => {
-    // TODO: Implement video compression logic
-    return {
-        filePath: filePath,
-    }
-}
@@ -1,8 +1,7 @@
 const Handlers = {
     "a-dash": require("./handlers/a-dash").default,
     "mq-hls": require("./handlers/mq-hls").default,
-    "img-compress": require("./handlers/img-compress").default,
-    "video-compress": require("./handlers/video-compress").default,
+    optimize: require("./handlers/optimize").default,
 }
 
 export type TransformationPayloadType = {
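With the map reduced to three handlers, dispatch presumably remains a plain key lookup. A rough sketch of that lookup follows; runHandler is a hypothetical helper, not the repository's Transformation API.

    // hypothetical dispatcher: resolve a handler by name and run it with the transformation payload
    async function runHandler(name, payload) {
        const handler = Handlers[name]

        if (typeof handler !== "function") {
            throw new Error(`Unknown transformation handler: ${name}`)
        }

        return await handler(payload) // e.g. { filePath, workPath, onProgress }
    }

    // await runHandler("optimize", { filePath, workPath })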
@@ -15,6 +15,7 @@ export type FileHandlePayload = {
     targetPath?: string // mostly provided by processed results
     //uploadId?: string
     transformations?: Array<string>
+    useCompression?: boolean
     s3Provider?: string
     onProgress?: Function
 }
@@ -29,11 +30,26 @@ export type S3UploadPayload = {
 
 export default class Upload {
     static fileHandle = async (payload: FileHandlePayload) => {
-        // process
-        const processed = await Upload.process(payload)
+        if (!payload.transformations) {
+            payload.transformations = []
+        }
 
-        // overwrite filePath
-        payload.filePath = processed.filePath
+        // if compression is enabled and no transformations are provided, add basic transformations for images or videos
+        if (
+            payload.useCompression === true &&
+            payload.transformations.length === 0
+        ) {
+            payload.transformations.push("optimize")
+        }
+
+        // process file upload if transformations are provided
+        if (payload.transformations.length > 0) {
+            // process
+            const processed = await Upload.transform(payload)
+
+            // overwrite filePath
+            payload.filePath = processed.filePath
+        }
 
         // upload
         const result = await Upload.toS3({
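The effect of the new fileHandle logic can be summarised with two hedged call sketches; only the fields visible in this diff are meaningful, and the paths are placeholders.

    // useCompression with no transformations: fileHandle pushes "optimize" before uploading
    await Upload.fileHandle({
        filePath: "/tmp/uploads/photo.png",
        workPath: "/tmp/uploads/work",
        useCompression: true,
    })

    // explicit transformations take precedence: "optimize" is not added, only "mq-hls" runs
    await Upload.fileHandle({
        filePath: "/tmp/uploads/clip.mp4",
        workPath: "/tmp/uploads/work",
        transformations: ["mq-hls"],
        useCompression: true,
    })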
@@ -50,7 +66,7 @@ export default class Upload {
         return result
     }
 
-    static process = async (payload: FileHandlePayload) => {
+    static transform = async (payload: FileHandlePayload) => {
         if (Array.isArray(payload.transformations)) {
             for await (const transformation of payload.transformations) {
                 const transformationResult = await Transformation.transform({
@@ -91,7 +107,7 @@ export default class Upload {
         let uploadPath = path.join(basePath, metadata["File-Hash"])
 
         if (isDirectory) {
-            uploadPath = path.join(basePath, nanoid())
+            uploadPath = path.join(basePath, global.nanoid())
         }
 
         if (typeof onProgress === "function") {
@@ -86,10 +86,14 @@ export default {
             workPath: workPath,
             transformations: transformations,
             s3Provider: config.useProvider,
+            useCompression: config.useCompression,
         }
 
         // if has transformations, use background job
-        if (transformations && transformations.length > 0) {
+        if (
+            (transformations && transformations.length > 0) ||
+            config.useCompression
+        ) {
             const job = await global.queues.createJob(
                 "file-process",
                 payload,
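The widened condition reads as: hand the file to the background job whenever explicit transformations were requested or compression is enabled. A small predicate capturing the same intent, sketched with a hypothetical helper name:

    // hypothetical helper mirroring the widened condition above
    function shouldQueueFileProcess(transformations, config) {
        return (
            (Array.isArray(transformations) && transformations.length > 0) ||
            Boolean(config.useCompression)
        )
    }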