Mirror of https://github.com/ragestudio/comty.git (synced 2025-06-09 10:34:17 +00:00)

fix silly upload not using an upload id on basePath

Commit: 6dc375bb9c
Parent: 87f750cc8b
@@ -1,20 +1,20 @@
 import fs from "node:fs"
 import path from "node:path"
 
-import mimeTypes from "mime-types"
-import {fileTypeFromBuffer} from 'file-type'
+import { fileTypeFromBuffer } from "file-type"
 
 import readChunk from "@utils/readChunk"
 
 import getFileHash from "@shared-utils/readFileHash"
 import putObject from "./putObject"
 
-import Transformation, { TransformationPayloadType } from "../Transformation"
+import Transformation from "../Transformation"
 
 export type FileHandlePayload = {
     user_id: string
     filePath: string
     workPath: string
-    uploadId?: string
+    targetPath?: string // mostly provided by processed results
+    //uploadId?: string
     transformations?: Array<string>
     s3Provider?: string
     onProgress?: Function
@@ -23,7 +23,8 @@ export type FileHandlePayload = {
 export type S3UploadPayload = {
     filePath: string
     basePath: string
-    targePath?: string
+    targetPath?: string
+    onProgress?: Function
 }
 
 export default class Upload {
@@ -38,7 +39,8 @@ export default class Upload {
         const result = await Upload.toS3({
             filePath: payload.filePath,
             targetPath: payload.targetPath,
-            basePath: payload.user_id,
+            basePath: `${payload.user_id}/${global.nanoid()}`,
+            onProgress: payload.onProgress,
         })
 
         // delete workpath
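This hunk is the fix named in the commit message. Before it, every upload for a user was keyed at user_id/basename, so two uploads sharing a basename silently overwrote each other in storage. A minimal sketch of the difference, assuming only that global.nanoid() returns a fresh unique id as the diff implies (the local nanoid below is a hypothetical stand-in):

import path from "node:path"

// Hypothetical stand-in for the repo's global.nanoid()
const nanoid = (): string => Math.random().toString(36).slice(2, 12)

// Before: the key depends only on user id and basename, so re-uploads collide.
const oldKey = (userId: string, filePath: string): string =>
    path.join(userId, path.basename(filePath))

// After: a per-upload id namespaces every upload.
const newKey = (userId: string, filePath: string): string =>
    path.join(userId, nanoid(), path.basename(filePath))

console.log(oldKey("user1", "/tmp/a/avatar.png")) // user1/avatar.png
console.log(oldKey("user1", "/tmp/b/avatar.png")) // user1/avatar.png (collision)
console.log(newKey("user1", "/tmp/b/avatar.png")) // user1/k2j9xq.../avatar.png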
@@ -49,7 +51,7 @@ export default class Upload {
 
     static process = async (payload: FileHandlePayload) => {
         if (Array.isArray(payload.transformations)) {
-            for await (const transformation: TransformationPayloadType of payload.transformations) {
+            for await (const transformation of payload.transformations) {
                 const transformationResult = await Transformation.transform({
                     filePath: payload.filePath,
                     workPath: payload.workPath,
@@ -64,9 +66,9 @@ export default class Upload {
 
                 // if is a directory, overwrite filePath to upload entire directory
                 if (transformationResult.outputPath) {
                     payload.filePath = transformationResult.outputPath
                     payload.targetPath = transformationResult.outputFile
-                    payload.isDirectory = true
+                    //payload.isDirectory = true
                 }
             }
         }
@@ -74,20 +76,22 @@ export default class Upload {
         return payload
     }
 
-    static toS3 = async (payload: S3UploadPayload, onProgress?: Function) => {
-        const { filePath, basePath, targetPath } = payload
+    static toS3 = async (payload: S3UploadPayload) => {
+        const { filePath, basePath, targetPath, onProgress } = payload
 
-        const firstBuffer = await readChunk(targetPath ?? filePath, { length: 4100 })
-        const fileHash = await getFileHash(fs.createReadStream(targetPath ?? filePath))
-        const fileType = await fileTypeFromBuffer(firstBuffer)
+        // if targetPath is provided, means its a directory
+        const isDirectory = targetPath !== undefined
 
-        const uploadPath = path.join(basePath, path.basename(filePath))
+        let uploadPath = path.resolve(basePath, path.basename(filePath))
 
-        const metadata = {
-            "File-Hash": fileHash,
-            "Content-Type": fileType.mime,
+        if (isDirectory) {
+            uploadPath = basePath
         }
 
+        const metadata = await this.buildFileMetadata(
+            isDirectory ? targetPath : filePath,
+        )
+
         if (typeof onProgress === "function") {
             onProgress({
                 percent: 0,
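The rewritten toS3 moves onProgress into the payload, derives isDirectory from the presence of targetPath, and collapses uploadPath to basePath for directory uploads. One thing worth flagging: the switch from path.join to path.resolve anchors the single-file uploadPath to the process working directory, which is unusual for an object-store key. A standalone sketch of the resolution logic as the diff has it (the example paths are hypothetical):

import path from "node:path"

// Mirrors the diff: a directory upload (targetPath set by a transformation)
// lands at basePath itself; a single file gets its basename appended.
function resolveUploadPath(basePath: string, filePath: string, targetPath?: string): string {
    const isDirectory = targetPath !== undefined
    if (isDirectory) {
        return basePath
    }
    // Note: path.resolve prefixes process.cwd() here, unlike the old path.join.
    return path.resolve(basePath, path.basename(filePath))
}

console.log(resolveUploadPath("user1/abc123", "/work/video.mp4"))
// => /<cwd>/user1/abc123/video.mp4
console.log(resolveUploadPath("user1/abc123", "/work/hls", "/work/hls/index.m3u8"))
// => user1/abc123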
@@ -95,13 +99,36 @@ export default class Upload {
             })
         }
 
+        console.log("Uploading to S3:", {
+            filePath,
+            uploadPath,
+            basePath,
+            targetPath,
+            metadata,
+        })
+
         const result = await putObject({
             filePath: filePath,
             uploadPath: uploadPath,
             metadata: metadata,
-            targetFilename: targetPath ? path.basename(targetPath) : null,
+            targetFilename: isDirectory ? path.basename(targetPath) : null,
         })
 
         return result
     }
+
+    static async buildFileMetadata(filePath: string) {
+        const firstBuffer = await readChunk(filePath, {
+            length: 4100,
+        })
+        const fileHash = await getFileHash(fs.createReadStream(filePath))
+        const fileType = await fileTypeFromBuffer(firstBuffer)
+
+        const metadata = {
+            "File-Hash": fileHash,
+            "Content-Type": fileType?.mime ?? "application/octet-stream",
+        }
+
+        return metadata
+    }
 }
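The new buildFileMetadata helper extracts the sniff-and-hash logic toS3 used to inline, and the ?? "application/octet-stream" fallback also fixes a latent crash: fileTypeFromBuffer returns undefined for content it cannot identify, so the old fileType.mime would throw. A self-contained sketch of the same technique, with local stand-ins for the repo's @utils/readChunk and @shared-utils/readFileHash aliases (the hash algorithm here, sha256, is an assumption):

import fs from "node:fs"
import crypto from "node:crypto"
import { fileTypeFromBuffer } from "file-type"

// Read the first 4100 bytes: file-type needs at most that many
// to detect any of its supported signatures.
async function readChunk(filePath: string, options: { length: number }): Promise<Buffer> {
    const handle = await fs.promises.open(filePath, "r")
    try {
        const buffer = Buffer.alloc(options.length)
        const { bytesRead } = await handle.read(buffer, 0, options.length, 0)
        return buffer.subarray(0, bytesRead)
    } finally {
        await handle.close()
    }
}

// Hash the whole file via a stream so large files are never fully buffered.
function getFileHash(filePath: string): Promise<string> {
    return new Promise((resolve, reject) => {
        const hash = crypto.createHash("sha256")
        fs.createReadStream(filePath)
            .on("error", reject)
            .on("data", (chunk) => hash.update(chunk))
            .on("end", () => resolve(hash.digest("hex")))
    })
}

async function buildFileMetadata(filePath: string) {
    const firstBuffer = await readChunk(filePath, { length: 4100 })
    const fileType = await fileTypeFromBuffer(firstBuffer)

    return {
        "File-Hash": await getFileHash(filePath),
        "Content-Type": fileType?.mime ?? "application/octet-stream",
    }
}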
@@ -67,8 +67,6 @@ export default {
             maxChunkSize: config.maxChunkSize,
         })
 
-        const useJob = true
-
         if (typeof assemble === "function") {
             try {
                 assemble = await assemble()
@@ -116,6 +114,7 @@ export default {
         return {
             next: true,
             chunkNumber: req.headers["uploader-chunk-number"],
+            config: config,
         }
     },
 }
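The second file's hunks drop an unused useJob flag and add config to the per-chunk response. A plausible reading, hedged since only fragments of the handler are visible here, is that the uploader client can then pick up server-side limits such as maxChunkSize from any chunk acknowledgement instead of a separate request. A sketch of a client consuming that field, with a hypothetical response shape:

// Hypothetical shape of the chunk acknowledgement after this commit;
// fields beyond next/chunkNumber/config are not shown in the diff.
type ChunkAck = {
    next: boolean
    chunkNumber: string
    config?: { maxChunkSize?: number }
}

async function sendChunk(url: string, chunk: Blob, chunkNumber: number): Promise<ChunkAck> {
    const res = await fetch(url, {
        method: "POST",
        headers: { "uploader-chunk-number": String(chunkNumber) },
        body: chunk,
    })
    const ack: ChunkAck = await res.json()

    // The client can adapt to server limits echoed back with each chunk.
    if (ack.config?.maxChunkSize) {
        console.log("server max chunk size:", ack.config.maxChunkSize)
    }

    return ack
}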