mirror of https://github.com/ragestudio/comty.git
synced 2025-06-10 19:14:16 +00:00

commit 201b28e084 (parent 1f9f2f476d)
use new file upload chunked
@@ -8,12 +8,16 @@
   },
   "license": "MIT",
   "dependencies": {
+    "@aws-sdk/client-s3": "^3.310.0",
     "@corenode/utils": "0.28.26",
     "@foxify/events": "^2.1.0",
     "@tensorflow/tfjs-node": "4.0.0",
+    "aws-sdk": "^2.1355.0",
     "axios": "^1.2.5",
     "bcrypt": "^5.1.0",
+    "busboy": "^1.6.0",
     "connect-mongo": "^4.6.0",
+    "content-range": "^2.0.2",
     "corenode": "0.28.26",
     "dicebar_lib": "1.0.1",
     "dotenv": "^16.0.3",
@@ -23,6 +27,7 @@
     "jsonwebtoken": "^9.0.0",
     "linebridge": "0.15.9",
     "luxon": "^3.2.1",
+    "merge-files": "^0.1.2",
     "mime-types": "^2.1.35",
     "minio": "^7.0.32",
     "moment": "^2.29.4",
@@ -34,10 +39,13 @@
     "passport-jwt": "^4.0.1",
     "passport-local": "^1.0.0",
     "path-to-regexp": "^6.2.1",
-    "sharp": "^0.31.3"
+    "sharp": "^0.31.3",
+    "split-chunk-merge": "^1.0.0"
   },
   "devDependencies": {
+    "chai": "^4.3.7",
     "cross-env": "^7.0.3",
+    "mocha": "^10.2.0",
     "nodemon": "^2.0.15"
   }
 }
@@ -83,6 +83,7 @@ export default class API {
     global.DEFAULT_POSTING_POLICY = {
         maxMessageLength: 512,
         acceptedMimeTypes: [
+            "application/octet-stream",
             "image/jpg",
             "image/jpeg",
             "image/png",
@@ -1,11 +1,98 @@
-import { Controller } from "linebridge/dist/server"
+import fs from "fs"
+
+import { Controller } from "linebridge/dist/server"
+import ChunkedUpload from "@lib/chunkedUpload"
 import uploadBodyFiles from "./services/uploadBodyFiles"
+
+import { videoTranscode } from "@lib/videoTranscode"
+import Jimp from "jimp"
+
+const maximuns = {
+    imageResolution: {
+        width: 3840,
+        height: 2160,
+    },
+    imageQuality: 80,
+}
+
+async function processVideo(file, params = {}) {
+    const result = await videoTranscode(file.filepath, global.uploadCachePath, {
+        videoCodec: "libx264",
+        format: "mp4",
+        ...params
+    })
+
+    file.filepath = result.filepath
+    file.filename = result.filename
+
+    return file
+}
+
+async function processImage(file) {
+    const { width, height } = await new Promise((resolve, reject) => {
+        Jimp.read(file.filepath)
+            .then((image) => {
+                resolve({
+                    width: image.bitmap.width,
+                    height: image.bitmap.height,
+                })
+            })
+            .catch((err) => {
+                reject(err)
+            })
+    })
+
+    if (width > maximuns.imageResolution.width || height > maximuns.imageResolution.height) {
+        await new Promise((resolve, reject) => {
+            Jimp.read(file.filepath)
+                .then((image) => {
+                    image
+                        .resize(maximuns.imageResolution.width, maximuns.imageResolution.height)
+                        .quality(maximuns.imageQuality)
+                        .write(file.filepath, resolve)
+                })
+                .catch((err) => {
+                    reject(err)
+                })
+        })
+    }
+
+    return file
+}
+
 export default class FilesController extends Controller {
     static refName = "FilesController"
     static useRoute = "/files"
 
+    chunkUploadEngine = new ChunkedUpload({
+        tmpPath: global.uploadCachePath,
+        outputPath: global.uploadCachePath,
+        maxFileSize: global.DEFAULT_POSTING_POLICY.maximumFileSize,
+        acceptedMimeTypes: global.DEFAULT_POSTING_POLICY.acceptedMimeTypes,
+        onExceedMaxFileSize: (req) => {
+            // check if user has permission to upload big files
+            if (!req.user) {
+                return false
+            }
+
+            return req.user.roles.includes("admin") || req.user.roles.includes("moderator") || req.user.roles.includes("developer")
+        }
+    })
+
+    fileTransformer = {
+        "video/avi": processVideo,
+        "video/quicktime": processVideo,
+        "video/mp4": processVideo,
+        "video/webm": processVideo,
+        "image/jpeg": processImage,
+        "image/png": processImage,
+        "image/gif": processImage,
+        "image/bmp": processImage,
+        "image/tiff": processImage,
+        "image/webp": processImage,
+        "image/jfif": processImage,
+    }
+
     httpEndpoints = {
         get: {
             "/objects": {
@@ -47,6 +134,98 @@ export default class FilesController extends Controller {
                 }
             },
         post: {
+            "/upload_chunk": {
+                middlewares: ["withAuthentication", this.chunkUploadEngine.makeMiddleware()],
+                fn: async (req, res) => {
+                    if (!req.isLastPart) {
+                        return res.json({
+                            status: "ok",
+                            filePart: req.filePart,
+                            lastPart: req.isLastPart,
+                        })
+                    }
+
+                    if (!req.fileResult) {
+                        return res.status(500).json({
+                            error: "File upload failed",
+                        })
+                    }
+
+                    try {
+                        // check if mimetype has transformer
+                        if (typeof this.fileTransformer[req.fileResult.mimetype] === "function") {
+                            req.fileResult = await this.fileTransformer[req.fileResult.mimetype](req.fileResult)
+                        }
+                    } catch (error) {
+                        console.log(error)
+
+                        return res.status(500).json({
+                            error: "File upload failed",
+                            reason: error.message,
+                        })
+                    }
+
+                    // start upload to s3
+                    const remoteUploadPath = req.user?._id ? `${req.user?._id.toString()}/${req.fileResult.filename}` : file.filename
+
+                    const remoteUploadResponse = await new Promise((_resolve, _reject) => {
+                        try {
+                            const fileStream = fs.createReadStream(req.fileResult.filepath)
+
+                            fs.stat(req.fileResult.filepath, (err, stats) => {
+                                try {
+                                    if (err) {
+                                        return _reject(new Error(`Failed to upload file to storage server > ${err.message}`))
+                                    }
+
+                                    global.storage.putObject(global.storage.defaultBucket, remoteUploadPath, fileStream, stats.size, req.fileResult, (err, etag) => {
+                                        if (err) {
+                                            return _reject(new Error(`Failed to upload file to storage server > ${err.message}`))
+                                        }
+
+                                        return _resolve({
+                                            etag,
+                                        })
+                                    })
+                                } catch (error) {
+                                    return _reject(new Error(`Failed to upload file to storage server > ${error.message}`))
+                                }
+                            })
+                        } catch (error) {
+                            return _reject(new Error(`Failed to upload file to storage server > ${error.message}`))
+                        }
+                    }).catch((err) => {
+                        res.status(500).json({
+                            error: err.message,
+                        })
+
+                        return false
+                    })
+
+                    if (!remoteUploadResponse) {
+                        return false
+                    }
+
+                    try {
+                        // remove file from cache
+                        await fs.promises.unlink(req.fileResult.filepath)
+                    } catch (error) {
+                        console.log("Failed to remove file from cache", error)
+
+                        return res.status(500).json({
+                            error: error.message,
+                        })
+                    }
+
+                    // get url location
+                    const remoteUrlObj = global.storage.composeRemoteURL(remoteUploadPath)
+
+                    return res.json({
+                        name: req.fileResult.filename,
+                        id: remoteUploadPath,
+                        url: remoteUrlObj,
+                    })
+                }
+            },
             "/upload": {
                 middlewares: ["withAuthentication"],
                 fn: async (req, res) => {
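For context, here is a minimal client-side sketch of how the new /files/upload_chunk endpoint could be driven. The four file-* headers and the multipart body are what the ChunkedUpload middleware (added later in this commit) expects; the chunk size, the use of a random UUID as file-hash, and the Authorization header name are assumptions for illustration, not part of this commit.

// Hypothetical browser-side driver for POST /files/upload_chunk (sketch only)
async function uploadFileInChunks(file, token, chunkSize = 5 * 1024 * 1024) {
    const totalChunks = Math.ceil(file.size / chunkSize)

    // the middleware uses this value as the chunk directory name and as the
    // final filename stem; a real client would likely send a sha256 of the file
    const fileHash = crypto.randomUUID()

    let lastResponse = null

    for (let chunkNumber = 0; chunkNumber < totalChunks; chunkNumber++) {
        const chunk = file.slice(chunkNumber * chunkSize, (chunkNumber + 1) * chunkSize)

        const formData = new FormData()
        formData.append("file", chunk, file.name)

        const response = await fetch("/files/upload_chunk", {
            method: "POST",
            headers: {
                "Authorization": `Bearer ${token}`, // assumption: how withAuthentication receives the session
                "file-hash": fileHash,
                "file-chunk-number": String(chunkNumber),
                "file-total-chunks": String(totalChunks),
                "file-size": String(file.size),
            },
            body: formData,
        })

        lastResponse = await response.json()
    }

    // intermediate chunks answer with { status, filePart, lastPart };
    // the final request resolves with { name, id, url } once the parts are merged
    return lastResponse
}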
@@ -23,7 +23,7 @@ const handleUploadVideo = async (file, params) => {
     return file
 }
 
-const handleImage = async (file, params) => {
+const handleImage = async (file) => {
     const { width, height } = await new Promise((resolve, reject) => {
         Jimp.read(file.filepath)
             .then((image) => {
@@ -164,9 +164,11 @@ export default async (payload) => {
         }
         case "image/webp": {
             file = await handleImage(file, params)
+            break
         }
         case "image/jfif": {
             file = await handleImage(file, params)
+            break
         }
         default: {
             // do nothing
@@ -180,7 +182,6 @@ export default async (payload) => {
         filename: file.newFilename,
     }
 
-
     // upload path must be user_id + file.newFilename
     const uploadPath = req.user?._id ? `${req.user?._id.toString()}/${file.newFilename}` : file.newFilename
 
packages/server/src/lib/chunkedUpload/index.js (new file, 229 lines)
@@ -0,0 +1,229 @@
import fs from "fs"
import path from "path"
import mime from "mime-types"
import Busboy from "busboy"
import crypto from "crypto"
import { fsMerge } from "split-chunk-merge"

export default class ChunkedUpload {
    constructor(options = {}) {
        this.options = options

        this.outputPath = options.outputPath
        this.tmpPath = options.tmpPath ?? "/tmp"

        this.maxFileSize = options.maxFileSize ?? 95
        this.acceptedMimeTypes = options.acceptedMimeTypes ?? [
            "image/*",
            "video/*",
            "audio/*",
        ]

        this.strictHashCheck = options.strictHashCheck ?? false

        if (!this.outputPath) {
            throw new Error("Missing outputPath")
        }
    }

    _isLastPart = (contentRange) => {
        return contentRange.size === contentRange.end + 1
    }

    _makeSureDirExists = (dirName) => {
        if (!fs.existsSync(dirName)) {
            fs.mkdirSync(dirName, { recursive: true })
        }
    }

    _buildOriginalFile = async (fileHash, filename) => {
        const chunkPartsPath = path.join(this.tmpPath, fileHash)
        const mergedFilePath = path.join(this.outputPath, filename)

        let partsFilenames = fs.readdirSync(chunkPartsPath)

        // sort the parts
        partsFilenames = partsFilenames.sort((a, b) => {
            const aNumber = Number(a)
            const bNumber = Number(b)

            if (aNumber < bNumber) {
                return -1
            }

            if (aNumber > bNumber) {
                return 1
            }

            return 0
        })

        partsFilenames = partsFilenames.map((partFilename) => {
            return path.join(chunkPartsPath, partFilename)
        })

        // merge the parts
        await fsMerge(partsFilenames, mergedFilePath)

        // check hash
        if (this.strictHashCheck) {
            const mergedFileHash = await this._getFileHash(mergedFilePath)

            if (mergedFileHash !== fileHash) {
                throw new Error("File hash mismatch")
            }
        }

        fs.rmdirSync(chunkPartsPath, { recursive: true })

        return mergedFilePath
    }

    _getFileHash = async (filePath) => {
        const buffer = await fs.promises.readFile(filePath)

        const hash = await crypto.createHash("sha256")
            .update(buffer)
            .digest()

        return hash.toString("hex")
    }

    makeMiddleware = () => {
        return (req, res, next) => {
            const busboy = Busboy({ headers: req.headers })

            busboy.on("file", async (fieldName, file, info) => {
                try {
                    const fileHash = req.headers["file-hash"]
                    const chunkNumber = req.chunkNumber = req.headers["file-chunk-number"]
                    const totalChunks = req.headers["file-total-chunks"]
                    const fileSize = req.headers["file-size"]

                    if (!fileHash) {
                        return res.status(400).json({
                            error: "Missing header [file-hash]",
                        })
                    }

                    if (!chunkNumber) {
                        return res.status(400).json({
                            error: "Missing header [file-chunk-number]",
                        })
                    }

                    if (!totalChunks) {
                        return res.status(400).json({
                            error: "Missing header [file-total-chunks]",
                        })
                    }

                    if (!fileSize) {
                        return res.status(400).json({
                            error: "Missing header [file-size]",
                        })
                    }

                    // check if file size is allowed
                    if (fileSize > this.maxFileSize) {
                        if (typeof this.options.onExceedMaxFileSize === "function") {
                            const result = await this.options.onExceedMaxFileSize({
                                fileHash,
                                chunkNumber,
                                totalChunks,
                                fileSize,
                                headers: req.headers,
                                user: req.user,
                            })

                            if (!result) {
                                return res.status(413).json({
                                    error: "File size is too big",
                                })
                            }
                        } else {
                            return res.status(413).json({
                                error: "File size is too big",
                            })
                        }
                    }

                    // check if allowedMimeTypes is an array and if it contains the file's mimetype
                    if (this.acceptedMimeTypes && Array.isArray(this.acceptedMimeTypes)) {
                        const regex = new RegExp(this.acceptedMimeTypes.join("|").replace(/\*/g, "[a-z]+").replace(/!/g, "^"), "i")

                        if (!regex.test(info.mimeType)) {
                            return res.status(400).json({
                                error: "File type is not allowed",
                                mimeType: info.mimeType,
                            })
                        }
                    }

                    const filePath = path.join(this.tmpPath, fileHash)
                    const chunkPath = path.join(filePath, chunkNumber)

                    this._makeSureDirExists(filePath)

                    const writeStream = fs.createWriteStream(chunkPath, { flags: "a" })

                    file.pipe(writeStream)

                    file.on("end", async () => {
                        if (Number(chunkNumber) === totalChunks - 1) {
                            try {
                                // build final filename
                                const realMimeType = mime.lookup(info.filename)
                                const finalFilenameExtension = mime.extension(realMimeType)
                                const finalFilename = `${fileHash}.${finalFilenameExtension}`

                                const buildResult = await this._buildOriginalFile(
                                    fileHash,
                                    finalFilename,
                                ).catch((err) => {
                                    res.status(500).json({
                                        error: "Failed to build final file",
                                    })

                                    return false
                                })

                                if (buildResult) {
                                    req.isLastPart = true
                                    req.fileResult = {
                                        filepath: buildResult,
                                        filename: finalFilename,
                                        mimetype: realMimeType,
                                        size: fileSize,
                                    }

                                    next()
                                }
                            } catch (error) {
                                return res.status(500).json({
                                    error: "Failed to build final file",
                                })
                            }
                        } else {
                            req.isLastPart = false

                            return res.status(200).json({
                                message: "Chunk uploaded",
                                chunkNumber,
                            })
                        }
                    })
                } catch (error) {
                    console.log("error:", error)

                    return res.status(500).json({
                        error: "Failed to upload file",
                    })
                }
            })

            req.pipe(busboy)
        }
    }
}
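As a usage reference, here is a hypothetical standalone wiring of the class above in a plain Express app; the commit itself mounts it through the linebridge FilesController. The route, paths, port, and limits below are illustrative only, and the import path assumes a local copy of the module rather than the repo's "@lib" alias.

// Illustrative standalone wiring of ChunkedUpload (sketch only, values made up)
import express from "express"
import ChunkedUpload from "./lib/chunkedUpload"

const app = express()

const uploader = new ChunkedUpload({
    tmpPath: "/tmp/chunk-cache",        // per-upload chunk directories are created here, keyed by file-hash
    outputPath: "/tmp/chunk-cache",     // merged files are written here
    maxFileSize: 100 * 1024 * 1024,     // compared directly against the client's "file-size" header
    acceptedMimeTypes: ["image/*", "video/*"],
    strictHashCheck: false,             // set true to verify the merged file against "file-hash"
})

// makeMiddleware() only calls next() after the final chunk has been merged;
// intermediate chunks are answered directly with { message: "Chunk uploaded", chunkNumber }
app.post("/upload_chunk", uploader.makeMiddleware(), (req, res) => {
    res.json({
        filepath: req.fileResult.filepath,
        filename: req.fileResult.filename,
        mimetype: req.fileResult.mimetype,
    })
})

app.listen(3050)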
@@ -1,22 +1,22 @@
 import path from "path"
-import fs from "fs"
 
 const ffmpeg = require("fluent-ffmpeg")
 
 function videoTranscode(originalFilePath, outputPath, options = {}) {
     return new Promise((resolve, reject) => {
         const filename = path.basename(originalFilePath)
-        const outputFilepath = `${outputPath}/${filename.split(".")[0]}.${options.format ?? "webm"}`
+        const outputFilename = `${filename.split(".")[0]}.${options.format ?? "webm"}`
+        const outputFilepath = `${outputPath}/${outputFilename}`
 
         console.debug(`[TRANSCODING] Transcoding ${originalFilePath} to ${outputFilepath}`)
 
         const onEnd = async () => {
-            // remove
-            await fs.promises.unlink(originalFilePath)
+            console.debug(`[TRANSCODING] Finished transcode ${originalFilePath} to ${outputFilepath}`)
 
-            console.debug(`[TRANSCODING] Transcoding ${originalFilePath} to ${outputFilepath} finished`)
-            return resolve(outputFilepath)
+            return resolve({
+                filepath: outputFilepath,
+                filename: outputFilename,
+            })
         }
 
         const onError = (err) => {
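A small consumer sketch of the changed contract: videoTranscode now resolves with an object instead of a bare output path, and it no longer unlinks the original file, so cleanup stays with the caller (the controller removes the cached file after the remote upload). This mirrors processVideo in the FilesController above; the paths are illustrative.

// Consumer sketch of the new { filepath, filename } result (paths are illustrative)
import { videoTranscode } from "@lib/videoTranscode"

async function transcodeUpload(cachedFile) {
    const result = await videoTranscode(cachedFile.filepath, global.uploadCachePath, {
        videoCodec: "libx264",
        format: "mp4",
    })

    // callers now receive both the output path and the derived filename,
    // and remain responsible for deleting cachedFile.filepath when done
    cachedFile.filepath = result.filepath
    cachedFile.filename = result.filename

    return cachedFile
}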