mirror of https://github.com/ragestudio/comty.git (synced 2025-06-09 10:34:17 +00:00)
fix silly upload not using an upload id on basePath
This commit is contained in:
parent 87f750cc8b
commit 6dc375bb9c
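In short: the S3 base path for an upload used to be derived from the user id alone, so every upload from the same user shared one prefix and same-named files could collide. The diff below appends a per-upload id to that prefix. A minimal sketch of the resulting key layout follows; it is not part of the commit, and the nanoid import is an assumption standing in for the global.nanoid() helper the diff relies on.

import { nanoid } from "nanoid"

const user_id = "user_123"

// before: one shared prefix per user, objects land at "user_123/<filename>"
const oldBasePath = user_id

// after: a fresh prefix per upload, objects land at "user_123/<uploadId>/<filename>"
const uploadId = nanoid() // e.g. "V1StGXR8_Z5jdHi6B-myT" (illustrative)
const newBasePath = `${user_id}/${uploadId}`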
@@ -1,20 +1,20 @@
 import fs from "node:fs"
 import path from "node:path"

 import mimeTypes from "mime-types"
-import {fileTypeFromBuffer} from 'file-type'
+import { fileTypeFromBuffer } from "file-type"
 import readChunk from "@utils/readChunk"

 import getFileHash from "@shared-utils/readFileHash"
 import putObject from "./putObject"

-import Transformation, { TransformationPayloadType } from "../Transformation"
+import Transformation from "../Transformation"

 export type FileHandlePayload = {
     user_id: string
     filePath: string
     workPath: string
+    uploadId?: string
     targetPath?: string // mostly provided by processed results
-    //uploadId?: string
     transformations?: Array<string>
     s3Provider?: string
     onProgress?: Function
@@ -23,7 +23,8 @@ export type FileHandlePayload = {
 export type S3UploadPayload = {
     filePath: string
     basePath: string
-    targePath?: string
+    targetPath?: string
+    onProgress?: Function
 }

 export default class Upload {
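For orientation, a hypothetical pair of payloads shaped after the two updated types; every value below is made up, and the types would be imported from the module this diff edits (its path is not shown here).

// illustrative only, not code from the commit
const handlePayload: FileHandlePayload = {
    user_id: "user_123",
    filePath: "/tmp/chunks/abc/photo.jpg",
    workPath: "/tmp/chunks/abc",
    uploadId: "V1StGXR8_Z5jdHi6B-myT", // the new optional field
}

const s3Payload: S3UploadPayload = {
    filePath: "/tmp/chunks/abc/photo.jpg",
    basePath: "user_123/V1StGXR8_Z5jdHi6B-myT",
    onProgress: (state: unknown) => console.log(state), // now carried inside the payload
}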
@@ -38,7 +39,8 @@ export default class Upload {
         const result = await Upload.toS3({
             filePath: payload.filePath,
             targetPath: payload.targetPath,
-            basePath: payload.user_id,
+            basePath: `${payload.user_id}/${global.nanoid()}`,
+            onProgress: payload.onProgress,
         })

         // delete workpath
@@ -49,7 +51,7 @@ export default class Upload {

     static process = async (payload: FileHandlePayload) => {
         if (Array.isArray(payload.transformations)) {
-            for await (const transformation: TransformationPayloadType of payload.transformations) {
+            for await (const transformation of payload.transformations) {
                 const transformationResult = await Transformation.transform({
                     filePath: payload.filePath,
                     workPath: payload.workPath,
@@ -66,7 +68,7 @@ export default class Upload {
                 if (transformationResult.outputPath) {
                     payload.filePath = transformationResult.outputPath
                     payload.targetPath = transformationResult.outputFile
-                    payload.isDirectory = true
+                    //payload.isDirectory = true
                 }
             }
         }
@@ -74,20 +76,22 @@ export default class Upload {
         return payload
     }

-    static toS3 = async (payload: S3UploadPayload, onProgress?: Function) => {
-        const { filePath, basePath, targetPath } = payload
+    static toS3 = async (payload: S3UploadPayload) => {
+        const { filePath, basePath, targetPath, onProgress } = payload

-        const firstBuffer = await readChunk(targetPath ?? filePath, { length: 4100 })
-        const fileHash = await getFileHash(fs.createReadStream(targetPath ?? filePath))
-        const fileType = await fileTypeFromBuffer(firstBuffer)
+        // if targetPath is provided, means its a directory
+        const isDirectory = targetPath !== undefined

-        const uploadPath = path.join(basePath, path.basename(filePath))
+        let uploadPath = path.resolve(basePath, path.basename(filePath))

-        const metadata = {
-            "File-Hash": fileHash,
-            "Content-Type": fileType.mime,
+        if (isDirectory) {
+            uploadPath = basePath
         }

+        const metadata = await this.buildFileMetadata(
+            isDirectory ? targetPath : filePath,
+        )
+
         if (typeof onProgress === "function") {
             onProgress({
                 percent: 0,
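The reworked toS3 above hinges on one convention: a defined targetPath marks a processed directory, and a directory is uploaded at basePath itself rather than under a per-file key. A standalone restatement of that decision as a sketch; the helper name is invented here, the module keeps this logic inline.

import path from "node:path"

// sketch: mirrors the uploadPath decision in the hunk above
function resolveUploadPath(basePath: string, filePath: string, targetPath?: string): string {
    // a provided targetPath means a processed directory, not a single file
    const isDirectory = targetPath !== undefined

    if (isDirectory) {
        return basePath
    }

    // single file: the key is the base path plus the file's own name
    // (path.resolve, matching the call in the hunk above)
    return path.resolve(basePath, path.basename(filePath))
}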
@@ -95,13 +99,36 @@ export default class Upload {
             })
         }

+        console.log("Uploading to S3:", {
+            filePath,
+            uploadPath,
+            basePath,
+            targetPath,
+            metadata,
+        })
+
         const result = await putObject({
             filePath: filePath,
             uploadPath: uploadPath,
             metadata: metadata,
-            targetFilename: targetPath ? path.basename(targetPath) : null,
+            targetFilename: isDirectory ? path.basename(targetPath) : null,
         })

         return result
     }
+
+    static async buildFileMetadata(filePath: string) {
+        const firstBuffer = await readChunk(filePath, {
+            length: 4100,
+        })
+        const fileHash = await getFileHash(fs.createReadStream(filePath))
+        const fileType = await fileTypeFromBuffer(firstBuffer)
+
+        const metadata = {
+            "File-Hash": fileHash,
+            "Content-Type": fileType?.mime ?? "application/octet-stream",
+        }
+
+        return metadata
+    }
 }
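Putting the refactor together, a hypothetical call against the new signature; onProgress now travels inside the S3UploadPayload instead of being a second argument, and the object metadata comes from the extracted buildFileMetadata helper. Paths and ids below are made up, and Upload is assumed to be imported from the module shown above.

// inside some async function; illustrative only
const result = await Upload.toS3({
    filePath: "/tmp/chunks/abc/photo.jpg",
    basePath: "user_123/V1StGXR8_Z5jdHi6B-myT",
    onProgress: ({ percent }: { percent: number }) => {
        console.log(`upload progress: ${percent}%`)
    },
})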
@@ -67,8 +67,6 @@ export default {
             maxChunkSize: config.maxChunkSize,
         })

-        const useJob = true
-
         if (typeof assemble === "function") {
             try {
                 assemble = await assemble()
@@ -116,6 +114,7 @@ export default {
         return {
             next: true,
+            chunkNumber: req.headers["uploader-chunk-number"],
             config: config,
         }
     },
 }