SrGooglo f62e885c65 Refactor file upload system with new transformations pipeline
This commit refactors the chunked upload system to support a transformation pipeline. Key changes include:

- Rename SSE fields for consistency (sseChannelId, sseUrl)
- Fix the progress reporting structure to use state and percent fields (see the sketch below)
- Add transformation handlers (a-dash, mq-hls, img-compress, video-compress)
- Create new Upload class with clear separation of concerns
- Improve file processing workflow with better directory structure
- Fix typo in UploadButton component (progess → progress)
- Remove deprecated file processing services
2025-04-24 06:06:21 +00:00
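
The renamed SSE fields and the new progress payload are easiest to see in a small sketch. This is illustrative only: the channel id, URL, and variable names below are assumptions, and only sseChannelId, sseUrl, state, and percent come from the commit description.

// Minimal sketch, assuming the server hands the client an SSE channel to
// watch for progress; the concrete values are placeholders, not the real API.
const sseChannelId = "upload:abc123"
const sseUrl = `/sse/channels/${sseChannelId}`

// Response shape the client can use to subscribe for progress updates.
const uploadResponse = { sseChannelId, sseUrl }

// Progress event shape published on that channel.
const progressEvent = {
    state: "transforming", // e.g. "uploading" | "transforming" | "finished"
    percent: 42, // 0-100 within the current state
}

console.log(uploadResponse, progressEvent)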

59 lines · 1.0 KiB · JavaScript

import fs from "node:fs"
import path from "node:path"
import pMap from "p-map"

export default async function standardUpload({
    filePath,
    uploadPath,
    metadata = {},
    targetFilename,
    onFinish,
}) {
    const isDirectory = await fs.promises
        .lstat(filePath)
        .then((stats) => stats.isDirectory())

    if (isDirectory) {
        // Recursively upload every entry in the directory, preserving its
        // layout under uploadPath, with at most 3 uploads in flight.
        let files = await fs.promises.readdir(filePath)

        files = files.map((file) => {
            const newPath = path.join(filePath, file)

            return {
                filePath: newPath,
                uploadPath: path.join(uploadPath, file),
            }
        })

        await pMap(files, standardUpload, {
            concurrency: 3,
        })

        return {
            id: uploadPath,
            url: global.storage.composeRemoteURL(uploadPath, targetFilename),
            metadata: metadata,
        }
    }

    // upload a single file to object storage
    await global.storage.fPutObject(
        process.env.S3_BUCKET,
        uploadPath,
        filePath,
        metadata,
    )

    const result = {
        id: uploadPath,
        url: global.storage.composeRemoteURL(uploadPath),
        metadata: metadata,
    }

    if (typeof onFinish === "function") {
        await onFinish(result)
    }

    return result
}
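
For reference, a usage sketch of standardUpload as defined above. The paths, object key, and metadata are made-up values, and the call assumes global.storage and the S3_BUCKET environment variable are already configured elsewhere in the service.

// Illustrative call only; file paths and the upload key are placeholders.
const result = await standardUpload({
    filePath: "/tmp/uploads/work/video.mp4", // local file (or directory) to push
    uploadPath: "users/123/uploads/video.mp4", // object key inside the bucket
    metadata: { "Content-Type": "video/mp4" },
    onFinish: async (res) => {
        console.log("uploaded:", res.url)
    },
})
// result => { id: uploadPath, url: <remote URL>, metadata }

Note that targetFilename is only consulted in the directory branch, where it is passed to composeRemoteURL when building the returned URL, and onFinish only fires for single-file uploads.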