implement file_server

This commit is contained in:
SrGooglo 2023-05-31 01:34:30 +00:00
parent 5c4d3ddd84
commit e38a139c12
24 changed files with 1272 additions and 20 deletions

View File

@ -37,4 +37,4 @@
"mocha": "^10.2.0",
"nodemon": "^2.0.15"
}
}
}

View File

@ -1,25 +1,48 @@
import fs from "fs"
import path from "path"
//import RedisClient from "@classes/RedisClient"
import RedisClient from "@classes/RedisClient"
import StorageClient from "@classes/StorageClient"
import CacheService from "@classes/CacheService"
import ComtyClient from "@classes/ComtyClient"
import express from "express"
import hyperexpress from "hyper-express"
import pkg from "../package.json"
export default class FileServerAPI {
server = global.server = new hyperexpress.Server()
// max body length is 1GB in bytes
static maxBodyLength = 1000 * 1000 * 1000
// server = global.server = new hyperexpress.Server({
// auto_close: true,
// fast_buffers: false,
// max_body_length: FileServerAPI.maxBodyLength,
// streaming: {
// highWaterMark: 1000 * 1000 * 1000,
// objectMode: false,
// }
// })
//internalRouter = new hyperexpress.Router()
internalRouter = express.Router()
server = global.server = express()
listenIp = process.env.HTTP_LISTEN_IP ?? "0.0.0.0"
listenPort = process.env.HTTP_LISTEN_PORT ?? 3006
listenPort = process.env.HTTP_LISTEN_PORT ?? 3060
internalRouter = new hyperexpress.Router()
//redis = global.redis = RedisClient()
redis = global.redis = RedisClient()
storage = global.storage = StorageClient()
cache = global.cache = new CacheService()
comty = global.comty = ComtyClient({
useWs: false,
})
async __loadControllers() {
let controllersPath = fs.readdirSync(path.resolve(__dirname, "controllers"))
@ -32,7 +55,7 @@ export default class FileServerAPI {
continue
}
const handler = controller(new hyperexpress.Router())
const handler = await controller(express.Router())
if (!handler) {
this.server.InternalConsole.error(`Controller ${controllerPath} did not return a valid handler.`)
@ -47,10 +70,27 @@ export default class FileServerAPI {
}
async __loadMiddlewares() {
let middlewaresPath = fs.readdirSync(path.resolve(__dirname, "middlewares"))
let middlewaresPath = fs.readdirSync(path.resolve(__dirname, "useMiddlewares"))
if (this.constructor.useMiddlewaresOrder) {
middlewaresPath = middlewaresPath.sort((a, b) => {
const aIndex = this.constructor.useMiddlewaresOrder.indexOf(a.replace(".js", ""))
const bIndex = this.constructor.useMiddlewaresOrder.indexOf(b.replace(".js", ""))
if (aIndex === -1) {
return 1
}
if (bIndex === -1) {
return -1
}
return aIndex - bIndex
})
}
for await (const middlewarePath of middlewaresPath) {
const middleware = require(path.resolve(__dirname, "middlewares", middlewarePath)).default
const middleware = require(path.resolve(__dirname, "useMiddlewares", middlewarePath)).default
if (!middleware) {
this.server.InternalConsole.error(`Middleware ${middlewarePath} not found.`)
@ -72,15 +112,18 @@ export default class FileServerAPI {
}
__registerInternalRoutes() {
this.server.get("/", (req, res) => {
this.internalRouter.get("/", (req, res) => {
return res.status(200).json({
name: pkg.name,
version: pkg.version,
routes: this.__getRegisteredRoutes()
})
})
this.server.any("*", (req, res) => {
// this.internalRouter.get("/routes", (req, res) => {
// return res.status(200).json(this.__getRegisteredRoutes())
// })
this.internalRouter.get("*", (req, res) => {
return res.status(404).json({
error: "Not found",
})
@ -94,10 +137,13 @@ export default class FileServerAPI {
await this.redis.initialize()
await this.storage.initialize()
this.server.use(express.json())
this.server.use(express.urlencoded({ extended: true }))
// register controllers & middlewares
await this.__registerInternalRoutes()
await this.__loadControllers()
await this.__loadMiddlewares()
await this.__registerInternalRoutes()
// use internal router
this.server.use(this.internalRouter)

View File

@ -0,0 +1,72 @@
import fs from "fs"
import path from "path"
export default class CacheService {
static deletionInterval = 1000 * 60 * 5
static cachePath = path.join(process.cwd(), ".cache")
constructor(params = {}) {
this.params = params
if (!fs.existsSync(CacheService.cachePath)) {
fs.mkdirSync(CacheService.cachePath, { recursive: true })
}
}
intervalMaps = new Map()
checkDeletionFilepath(filepath) {
try {
const stats = fs.statSync(filepath)
const lastAccess = new Date(stats.atime)
if (lastAccess.getTime() + CacheService.deletionInterval < Date.now()) {
fs.promises.unlink(filepath)
} else {
return false
}
return true
} catch (error) {
console.error(error)
fs.promises.unlink(filepath)
return true
}
}
appendToDeletion(filepath) {
// create an interval of 5 minutes to delete the file
// check the last time the file was accessed and if it was accessed in the last 5 minutes
// reset the interval until the file is not accessed for 5 minutes and then delete it
try {
const createInterval = () => {
let interval = setInterval(async () => {
try {
// checkDeletionFilepath resolves true once the file has been deleted
const results = await this.checkDeletionFilepath(filepath)
clearInterval(interval)
this.intervalMaps.delete(filepath)
if (!results) {
// not deleted yet (recently accessed): re-arm the timer
this.appendToDeletion(filepath)
}
} catch (error) {
return clearInterval(interval)
}
}, CacheService.deletionInterval)
return interval
}
this.intervalMaps.set(filepath, createInterval())
} catch (error) {
console.error(error)
return fs.promises.unlink(filepath)
}
}
}
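A minimal usage sketch (not part of this commit), assuming the service is wired up as FileServerAPI does above (`global.cache = new CacheService()`); the file path is illustrative:

// sketch: arm the cold-file deletion timer for a cached file
import path from "path"
import CacheService from "@classes/CacheService"

const cache = new CacheService()

// hypothetical file inside the .cache working directory
const cachedFile = path.join(CacheService.cachePath, "example.tmp")

// checked periodically; unlinked once it goes ~5 minutes without access
cache.appendToDeletion(cachedFile)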

View File

@ -0,0 +1,9 @@
import createClient from "comty.js"
export default (params = {}) => {
return createClient({
...params,
accessKey: process.env.COMTY_ACCESS_KEY,
privateKey: process.env.COMTY_PRIVATE_KEY,
})
}

View File

@ -0,0 +1,58 @@
import mongoose from "mongoose"
function getConnectionConfig(obj) {
const { DB_USER, DB_DRIVER, DB_NAME, DB_PWD, DB_HOSTNAME, DB_PORT } = obj
let auth = [
DB_DRIVER ?? "mongodb",
"://",
]
if (DB_USER && DB_PWD) {
auth.push(`${DB_USER}:${DB_PWD}@`)
}
auth.push(DB_HOSTNAME ?? "localhost")
auth.push(`:${DB_PORT ?? "27017"}`)
if (DB_USER) {
auth.push("/?authMechanism=DEFAULT")
}
auth = auth.join("")
return [
auth,
{
dbName: DB_NAME,
useNewUrlParser: true,
useUnifiedTopology: true,
}
]
}
export default class DBManager {
initialize = async (config) => {
console.log("🔌 Connecting to DB...")
const dbConfig = getConnectionConfig(config ?? process.env)
mongoose.set("strictQuery", false)
const connection = await mongoose.connect(...dbConfig)
.catch((err) => {
console.log(`❌ Failed to connect to DB, retrying...\n`)
console.log(err)
// setTimeout(() => {
// this.initialize()
// }, 1000)
return false
})
if (connection) {
console.log(`✅ Connected to DB.`)
}
}
}
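For illustration (all values hypothetical), getConnectionConfig composes a standard MongoDB connection string from the environment:

// sketch with made-up values:
// DB_USER=comty DB_PWD=secret DB_HOSTNAME=db.local DB_PORT=27017 DB_NAME=files
// getConnectionConfig(process.env) returns:
// [
//     "mongodb://comty:secret@db.local:27017/?authMechanism=DEFAULT",
//     { dbName: "files", useNewUrlParser: true, useUnifiedTopology: true }
// ]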

View File

@ -0,0 +1,84 @@
export class AuthorizationError extends Error {
constructor(req, res, message = "This endpoint requires authorization") {
super(message)
this.name = "AuthorizationError"
if (req && res) {
return res.status(this.constructor.statusCode).json({
error: message,
})
}
}
static get statusCode() {
return 401
}
}
export class NotFoundError extends Error {
constructor(req, res, message = "Not found") {
super(message)
this.name = "NotFoundError"
if (req && res) {
return res.status(this.constructor.statusCode).json({
error: message,
})
}
}
static get statusCode() {
return 404
}
}
export class PermissionError extends Error {
constructor(req, res, message = "You don't have permission to do this") {
super(message)
this.name = "PermissionError"
if (req && res) {
return res.status(this.constructor.statusCode).json({
error: message,
})
}
}
static get statusCode() {
return 403
}
}
export class BadRequestError extends Error {
constructor(req, res, message = "Bad request") {
super(message)
this.name = "BadRequestError"
if (req && res) {
return res.status(this.constructor.statusCode).json({
error: message,
})
}
}
static get statusCode() {
return 400
}
}
export class InternalServerError extends Error {
constructor(req, res, message = "Internal server error") {
super(message)
this.name = "InternalServerError"
if (req && res) {
return res.status(this.constructor.statusCode).json({
error: message,
})
}
}
static get statusCode() {
return 500
}
}
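Constructed with (req, res), each class immediately sends its status code and JSON body, so a handler can bail out in a single line. A sketch of that call pattern (mirroring the upload route below):

// sketch: responding with a typed error from a route handler
import * as Errors from "@classes/Errors"

export default async (req, res) => {
    if (!req.session) {
        // sends 401 { "error": "This endpoint requires authorization" }
        return new Errors.AuthorizationError(req, res)
    }

    return res.json({ ok: true })
}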

View File

@ -0,0 +1,253 @@
// Originally forked from: Buzut/huge-uploader-nodejs
// Copyright (c) 2018, Quentin Busuttil All rights reserved.
import fs from "node:fs"
import path from "node:path"
import { promisify } from "node:util"
import mimetypes from "mime-types"
import crypto from "node:crypto"
import Busboy from "busboy"
export function getFileHash(file) {
return new Promise((resolve, reject) => {
const hash = crypto.createHash("sha256")
file.on("data", (chunk) => hash.update(chunk))
file.on("end", () => resolve(hash.digest("hex")))
file.on("error", reject)
})
}
export function checkHeaders(headers) {
if (
!headers["uploader-chunk-number"] ||
!headers["uploader-chunks-total"] ||
!headers["uploader-original-name"] ||
!headers["uploader-file-id"] ||
!headers["uploader-chunks-total"].match(/^[0-9]+$/) ||
!headers["uploader-chunk-number"].match(/^[0-9]+$/)
) {
return false
}
return true
}
export function checkTotalSize(maxFileSize, maxChunkSize, totalChunks) {
if (maxChunkSize * totalChunks > maxFileSize) {
return false
}
return true
}
export function cleanChunks(dirPath) {
fs.readdir(dirPath, (err, files) => {
let filesLength = files.length
files.forEach((file) => {
fs.unlink(path.join(dirPath, file), () => {
if (--filesLength === 0) fs.rmdir(dirPath, () => { }) // callback does nothing but is required
})
})
})
}
export function createAssembleChunksPromise({
tmpDir,
headers,
}) {
const asyncReadFile = promisify(fs.readFile)
const asyncAppendFile = promisify(fs.appendFile)
const originalMimeType = mimetypes.lookup(headers["uploader-original-name"])
const originalExtension = mimetypes.extension(originalMimeType)
const totalChunks = +headers["uploader-chunks-total"]
const fileId = headers["uploader-file-id"]
const workPath = path.join(tmpDir, fileId)
const chunksPath = path.resolve(workPath, "chunks")
const assembledFilepath = path.join(workPath, `assembled.${originalExtension}`)
let chunkCount = 0
let finalFilepath = null
return () => {
return new Promise((resolve, reject) => {
const onEnd = async () => {
try {
const hash = await getFileHash(fs.createReadStream(assembledFilepath))
finalFilepath = path.resolve(workPath, `${hash}_${Date.now()}.${originalExtension}`)
fs.renameSync(assembledFilepath, finalFilepath)
cleanChunks(chunksPath)
return resolve({
filename: headers["uploader-original-name"],
filepath: finalFilepath,
cachePath: workPath,
hash,
mimetype: originalMimeType,
extension: originalExtension,
})
} catch (error) {
return reject(error)
}
}
const pipeChunk = () => {
asyncReadFile(path.join(chunksPath, chunkCount.toString()))
.then((chunk) => asyncAppendFile(assembledFilepath, chunk))
.then(() => {
// 0 indexed files = length - 1, so increment before comparison
if (totalChunks > ++chunkCount) {
return pipeChunk()
}
return onEnd()
})
.catch(reject)
}
pipeChunk()
})
}
}
export function mkdirIfDoesntExist(dirPath, callback) {
// with `recursive: true` mkdir succeeds even if the directory exists,
// so the callback always fires (the old existsSync guard could skip it)
fs.mkdir(dirPath, { recursive: true }, callback)
}
export function handleFile(tmpDir, headers, fileStream) {
const dirPath = path.join(tmpDir, headers["uploader-file-id"])
const chunksPath = path.join(dirPath, "chunks")
const chunkPath = path.join(chunksPath, headers["uploader-chunk-number"])
const chunkCount = +headers["uploader-chunk-number"]
const totalChunks = +headers["uploader-chunks-total"]
let error
let assembleChunksPromise
let finished = false
let writeStream
const writeFile = () => {
writeStream = fs.createWriteStream(chunkPath)
writeStream.on("error", (err) => {
error = err
fileStream.resume()
})
writeStream.on("close", () => {
finished = true
// if all is uploaded
if (chunkCount === totalChunks - 1) {
assembleChunksPromise = createAssembleChunksPromise({
tmpDir,
headers,
})
}
})
fileStream.pipe(writeStream)
}
// make sure chunk is in range
if (chunkCount < 0 || chunkCount >= totalChunks) {
error = new Error("Chunk is out of range")
fileStream.resume()
}
else if (chunkCount === 0) {
// create the file upload dir if it's the first chunk
mkdirIfDoesntExist(chunksPath, (err) => {
if (err) {
error = err
fileStream.resume()
}
else writeFile()
})
}
else {
// make sure dir exists if it's not first chunk
fs.stat(dirPath, (err) => {
if (err) {
error = new Error("Upload has expired")
fileStream.resume()
}
else writeFile()
})
}
return (callback) => {
if (finished && !error) callback(null, assembleChunksPromise)
else if (error) callback(error)
else {
writeStream.on("error", callback)
writeStream.on("close", () => callback(null, assembleChunksPromise))
}
}
}
export function uploadFile(req, tmpDir, maxFileSize, maxChunkSize) {
return new Promise((resolve, reject) => {
if (!checkHeaders(req.headers)) {
reject(new Error("Missing header(s)"))
return
}
if (!checkTotalSize(maxFileSize, maxChunkSize, req.headers["uploader-chunks-total"])) {
reject(new Error("File is above size limit"))
return
}
try {
let limitReached = false
let getFileStatus
const busboy = Busboy({ headers: req.headers, limits: { files: 1, fileSize: maxChunkSize } }) // maxChunkSize is already in bytes
busboy.on("file", (fieldname, fileStream) => {
fileStream.on("limit", () => {
limitReached = true
fileStream.resume()
})
getFileStatus = handleFile(tmpDir, req.headers, fileStream)
})
busboy.on("close", () => {
if (limitReached) {
reject(new Error("Chunk is above size limit"))
return
}
getFileStatus((fileErr, assembleChunksF) => {
if (fileErr) reject(fileErr)
else resolve(assembleChunksF)
})
})
req.pipe(busboy)
}
catch (err) {
reject(err)
}
})
}
export default uploadFile
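The uploader expects every chunk as its own multipart POST carrying the uploader-* headers validated by checkHeaders. A browser-side sketch of that protocol (the endpoint path is an assumption, not taken from this commit):

// sketch: sending a File in 10 MB chunks (endpoint path hypothetical)
async function uploadInChunks(file, endpoint = "/upload/chunk") {
    const chunkSize = 10 * 1000 * 1000
    const totalChunks = Math.ceil(file.size / chunkSize)
    const fileId = crypto.randomUUID()

    for (let i = 0; i < totalChunks; i++) {
        const body = new FormData()
        body.append("file", file.slice(i * chunkSize, (i + 1) * chunkSize))

        await fetch(endpoint, {
            method: "POST",
            headers: {
                "uploader-chunk-number": String(i),
                "uploader-chunks-total": String(totalChunks),
                "uploader-original-name": file.name,
                "uploader-file-id": fileId,
            },
            body,
        })
    }
}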

View File

@ -0,0 +1,44 @@
import { createClient } from "redis"
function composeURL() {
// support for auth
let url = "redis://"
if (process.env.REDIS_PASSWORD && process.env.REDIS_USERNAME) {
url += process.env.REDIS_USERNAME + ":" + process.env.REDIS_PASSWORD + "@"
}
url += process.env.REDIS_HOST ?? "localhost"
if (process.env.REDIS_PORT) {
url += ":" + process.env.REDIS_PORT
}
return url
}
export default () => {
let client = createClient({
url: composeURL(),
})
client.initialize = async () => {
console.log("🔌 Connecting to Redis client...")
await client.connect()
return client
}
// handle when client disconnects unexpectedly to avoid main crash
client.on("error", (error) => {
console.error("❌ Redis client error:", error)
})
// handle when client connects
client.on("connect", () => {
console.log("✅ Redis client connected.")
})
return client
}

View File

@ -0,0 +1,97 @@
const Minio = require("minio")
import path from "path"
export const generateDefaultBucketPolicy = (payload) => {
const { bucketName } = payload
if (!bucketName) {
throw new Error("bucketName is required")
}
return {
Version: "2012-10-17",
Statement: [
{
Action: [
"s3:GetObject"
],
Effect: "Allow",
Principal: {
AWS: [
"*"
]
},
Resource: [
`arn:aws:s3:::${bucketName}/*`
],
Sid: ""
}
]
}
}
export class StorageClient extends Minio.Client {
constructor(options) {
super(options)
this.defaultBucket = String(options.defaultBucket)
this.defaultRegion = String(options.defaultRegion)
}
composeRemoteURL = (key) => {
const _path = path.join(this.defaultBucket, key)
return `${this.protocol}//${this.host}:${this.port}/${_path}`
}
setDefaultBucketPolicy = async (bucketName) => {
const policy = generateDefaultBucketPolicy({ bucketName })
return this.setBucketPolicy(bucketName, JSON.stringify(policy))
}
initialize = async () => {
console.log("🔌 Checking if storage client have default bucket...")
// check connection with s3
const bucketExists = await this.bucketExists(this.defaultBucket).catch(() => {
return false
})
if (!bucketExists) {
console.warn("🪣 Default bucket not exists! Creating new bucket...")
await this.makeBucket(this.defaultBucket, "s3")
// set default bucket policy
await this.setDefaultBucketPolicy(this.defaultBucket)
}
// check if default bucket policy exists
const bucketPolicy = await this.getBucketPolicy(this.defaultBucket).catch(() => {
return null
})
if (!bucketPolicy) {
// set default bucket policy
await this.setDefaultBucketPolicy(this.defaultBucket)
}
console.log("✅ Storage client is ready.")
}
}
export const createStorageClientInstance = (options) => {
return new StorageClient({
...options,
endPoint: process.env.S3_ENDPOINT,
port: Number(process.env.S3_PORT),
useSSL: toBoolean(process.env.S3_USE_SSL),
accessKey: process.env.S3_ACCESS_KEY,
secretKey: process.env.S3_SECRET_KEY,
defaultBucket: process.env.S3_BUCKET,
defaultRegion: process.env.S3_REGION,
})
}
export default createStorageClientInstance
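The factory pulls its connection settings from the environment; a sketch with hypothetical values for a local MinIO instance:

// sketch (hypothetical env):
// S3_ENDPOINT=localhost S3_PORT=9000 S3_USE_SSL=false
// S3_ACCESS_KEY=minioadmin S3_SECRET_KEY=minioadmin
// S3_BUCKET=comty-files S3_REGION=us-east-1
import StorageClient from "@classes/StorageClient"

const storage = StorageClient()

// ensures the default bucket and its public-read policy exist
await storage.initialize()

console.log(storage.composeRemoteURL("user_id/photo.png"))
// -> http://localhost:9000/comty-files/user_id/photo.png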

View File

@ -0,0 +1,20 @@
import path from "path"
import createRoutesFromDirectory from "@utils/createRoutesFromDirectory"
import getMiddlewares from "@utils/getMiddlewares"
export default async (router) => {
const routesPath = path.resolve(__dirname, "routes")
const middlewares = await getMiddlewares(["withOptionalAuth"])
for (const middleware of middlewares) {
router.use(middleware)
}
router = createRoutesFromDirectory("routes", routesPath, router)
return {
path: "/upload",
router,
}
}
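Controllers follow a small contract: an async default export receives a fresh router, decorates it, and returns { path, router }, which __loadControllers presumably mounts under path. A minimal sketch of that shape (route and prefix hypothetical):

// sketch: the minimal controller shape FileServerAPI expects
export default async (router) => {
    router.get("/ping", (req, res) => res.json({ pong: true }))

    return {
        path: "/example", // hypothetical mount prefix
        router,
    }
}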

View File

@ -0,0 +1,73 @@
import path from "path"
import fs from "fs"
import * as Errors from "@classes/Errors"
import FileUpload from "@classes/FileUpload"
import useCompression from "@services/useCompression"
const cachePath = global.cache.constructor.cachePath
export default async (req, res) => {
// ensure the session middleware attached an authenticated session
let auth = req.session
if (!auth) {
return new Errors.AuthorizationError(req, res)
}
const userPath = path.join(cachePath, req.session.user_id)
// 10 GB in bytes
const maxFileSize = 10 * 1000 * 1000 * 1000
// 10MB in bytes
const maxChunkSize = 10 * 1000 * 1000
let build = await FileUpload(req, userPath, maxFileSize, maxChunkSize)
.catch((err) => {
console.log("err", err)
new Errors.InternalServerError(req, res, err.message)
return false
})
if (build === false) {
return false
} else {
if (typeof build === "function") {
try {
build = await build()
if (!req.headers["no-compression"]) {
build = await useCompression(build)
}
// compose remote path
const remotePath = `${req.session.user_id}/${path.basename(build.filepath)}`
// upload to storage
await global.storage.fPutObject(process.env.S3_BUCKET, remotePath, build.filepath)
// remove from cache
fs.promises.rm(build.cachePath, { recursive: true, force: true })
// compose url
const url = global.storage.composeRemoteURL(remotePath)
return res.json({
name: build.filename,
id: remotePath,
url: url,
})
} catch (error) {
console.log(error)
return new Errors.InternalServerError(req, res, error.message)
}
}
return res.json({
success: true,
})
}
}

View File

@ -1,11 +1,22 @@
require("dotenv").config()
global.isProduction = process.env.NODE_ENV === "production"
import { webcrypto as crypto } from "crypto"
import path from "path"
import { registerBaseAliases } from "linebridge/dist/server"
registerBaseAliases(undefined, {
globalThis["__root"] = path.resolve(__dirname)
const customAliases = {
"root": globalThis["__root"],
"@services": path.resolve(__dirname, "services"),
})
}
if (!global.isProduction) {
customAliases["comty.js"] = path.resolve(__dirname, "../../comty.js/src")
}
registerBaseAliases(undefined, customAliases)
// patches
const { Buffer } = require("buffer")
@ -41,14 +52,38 @@ global.toBoolean = (value) => {
return false
}
global.isProduction = process.env.NODE_ENV === "production"
import API from "./api"
async function main() {
const mainAPI = new API()
const instance = new API()
await mainAPI.initialize()
await instance.initialize()
// // kill on process exit
// process.on("exit", () => {
// instance.server.close()
// process.exit(0)
// })
// // kill on ctrl+c
// process.on("SIGINT", () => {
// instance.server.close()
// process.exit(0)
// })
// // kill on uncaught exceptions
// process.on("uncaughtException", (error) => {
// console.error(`🆘 [FATAL ERROR] >`, error)
// instance.server.close()
// process.exit(1)
// })
// // kill on unhandled rejections
// process.on("unhandledRejection", (error) => {
// console.error(`🆘 [FATAL ERROR] >`, error)
// instance.server.close()
// process.exit(1)
// })
}
main().catch((error) => {

View File

@ -0,0 +1,25 @@
export default async function (req, res, next) {
// extract the authentication header
let auth = req.headers.authorization
if (!auth) {
return res.status(401).json({ error: "Unauthorized, missing token" })
}
auth = auth.replace("Bearer ", "")
// check if authentification is valid
const validation = await comty.rest.session.validateToken(auth).catch((error) => {
return {
valid: false,
}
})
if (!validation.valid) {
return res.status(401).json({ error: "Unauthorized" })
}
req.session = validation.session
return next()
}
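A sketch of the client side of this middleware (URL, path, and token are placeholders):

// sketch: calling a protected endpoint with the bearer token withAuth expects
const response = await fetch("http://localhost:3060/example", {
    headers: {
        Authorization: "Bearer <session-token>", // placeholder token
    },
})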

View File

@ -0,0 +1,23 @@
export default function (req, res, next) {
// extract the authentication header
let auth = req.headers.authorization
if (!auth) {
return next()
}
auth = req.sessionToken = auth.replace("Bearer ", "")
// check if authentification is valid
comty.rest.session.validateToken(auth)
.catch((error) => {
return {
valid: false,
}
})
.then((validation) => {
req.session = validation.session
next()
})
}

View File

@ -0,0 +1,55 @@
export default async (socket, next) => {
try {
const token = socket.handshake.auth.token
if (!token) {
return next(new Error(`auth:token_missing`))
}
const validation = await global.comty.rest.session.validateToken(token).catch((err) => {
console.error(`[${socket.id}] failed to validate session due to a server error`, err)
return {
valid: false,
error: err,
}
})
if (!validation.valid) {
if (validation.error) {
return next(new Error(`auth:server_error`))
}
return next(new Error(`auth:token_invalid`))
}
const session = validation.session
const userData = await global.comty.rest.user.data({
user_id: session.user_id,
}).catch((err) => {
console.error(`[${socket.id}] failed to get user data due to a server error`, err)
return null
})
if (!userData) {
return next(new Error(`auth:user_failed`))
}
try {
socket.userData = userData
socket.token = token
socket.session = session
}
catch (err) {
return next(new Error(`auth:decode_failed`))
}
next()
} catch (error) {
console.error(`[${socket.id}] failed to connect due to a server error`, error)
next(new Error(`auth:authentification_failed`))
}
}
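This one is shaped as a Socket.IO handshake middleware; a registration sketch (the files shown here never wire it up, so the import path and port are assumptions):

// sketch: registering the handshake-auth middleware on a socket.io server
import { Server } from "socket.io"
import withWsAuth from "./middlewares/withWsAuth" // hypothetical path

const io = new Server(3061) // hypothetical port
io.use(withWsAuth) // rejects handshakes whose token fails validation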

View File

@ -0,0 +1,117 @@
import fs from "node:fs"
import Jimp from "jimp"
import mimetypes from "mime-types"
import videoTranscode from "@services/videoTranscode"
const cachePath = global.cache.constructor.cachePath
const fileTransformer = {
"video/avi": processVideo,
"video/quicktime": processVideo,
"video/mp4": processVideo,
"video/webm": processVideo,
"image/jpeg": processImage,
"image/png": processImage,
"image/gif": processImage,
"image/bmp": processImage,
"image/tiff": processImage,
"image/webp": processImage,
"image/jfif": processImage,
}
const maximums = {
imageResolution: {
width: 3840,
height: 2160,
},
imageQuality: 80,
}
async function processVideo(file) {
if (!file) {
throw new Error("file is required")
}
const result = await videoTranscode(file.filepath, file.cachePath, {
videoCodec: "libx264",
format: "mp4",
audioBitrate: 128,
videoBitrate: 1024,
})
file.filepath = result.filepath
file.filename = result.filename
return file
}
async function processImage(file) {
if (!file) {
throw new Error("file is required")
}
const { width, height } = await new Promise((resolve, reject) => {
Jimp.read(file.filepath)
.then((image) => {
resolve({
width: image.bitmap.width,
height: image.bitmap.height,
})
})
.catch((err) => {
reject(err)
})
})
if (width > maximums.imageResolution.width || height > maximums.imageResolution.height) {
await new Promise((resolve, reject) => {
// calculate max resolution respecting aspect ratio
const resizedResolution = {
width: maximums.imageResolution.width,
height: maximums.imageResolution.height,
}
if (width > height) {
resizedResolution.height = Math.floor((height / width) * maximums.imageResolution.width)
}
if (height > width) {
resizedResolution.width = Math.floor((width / height) * maximums.imageResolution.height)
}
Jimp.read(file.filepath)
.then((image) => {
image
.resize(resizedResolution.width, resizedResolution.height)
.quality(maximums.imageQuality)
.write(file.filepath, resolve)
})
.catch((err) => {
reject(err)
})
})
}
return file
}
export default async (file) => {
if (!file) {
throw new Error("file is required")
}
if (!fs.existsSync(file.filepath)) {
throw new Error(`File ${file.filepath} not found`)
}
const fileMimetype = mimetypes.lookup(file.filepath)
if (typeof fileTransformer[fileMimetype] !== "function") {
console.warn(`File (${file.filepath}) has mimetype ${fileMimetype} and will not be processed`)
return file
}
return await fileTransformer[fileMimetype](file)
}

View File

@ -0,0 +1,62 @@
import path from "path"
const ffmpeg = require("fluent-ffmpeg")
const defaultParams = {
audioBitrate: 128,
videoBitrate: 1024,
videoCodec: "libvpx",
audioCodec: "libvorbis",
format: "webm",
}
export default (input, cachePath, params = defaultParams) => {
return new Promise((resolve, reject) => {
const filename = path.basename(input)
const outputFilename = `${filename.split(".")[0]}_ff.${params.format ?? "webm"}`
const outputFilepath = `${cachePath}/${outputFilename}`
console.debug(`[TRANSCODING] Transcoding ${input} to ${outputFilepath}`)
const onEnd = async () => {
console.debug(`[TRANSCODING] Finished transcode ${input} to ${outputFilepath}`)
return resolve({
filepath: outputFilepath,
filename: outputFilename,
})
}
const onError = (err) => {
console.error(`[TRANSCODING] Transcoding ${input} to ${outputFilepath} failed`, err)
return reject(err)
}
let exec = null
const commands = {
input: input,
...params,
output: outputFilepath,
}
// chain methods
Object.keys(commands).forEach((key) => {
if (exec === null) {
exec = ffmpeg(commands[key])
} else {
if (typeof exec[key] !== "function") {
console.warn(`[TRANSCODING] Method ${key} is not a function`)
return false
}
exec = exec[key](commands[key])
}
})
exec
.on("error", onError)
.on("end", onEnd)
.run()
})
}
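A call sketch matching how useCompression invokes it above (paths hypothetical):

// sketch: transcode a cached upload to mp4/h264
import videoTranscode from "@services/videoTranscode"

const result = await videoTranscode("/cache/user/raw.mov", "/cache/user", {
    videoCodec: "libx264",
    format: "mp4",
    audioBitrate: 128,
    videoBitrate: 1024,
})
// -> { filepath: "/cache/user/raw_ff.mp4", filename: "raw_ff.mp4" }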

View File

@ -0,0 +1,8 @@
import cors from "cors"
export default cors({
origin: "*",
methods: ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS", "HEAD", "CONNECT", "TRACE"],
preflightContinue: false,
optionsSuccessStatus: 204,
})

View File

@ -0,0 +1,19 @@
export default (req, res, next) => {
const startHrTime = process.hrtime()
res.on("finish", () => {
const elapsedHrTime = process.hrtime(startHrTime)
const elapsedTimeInMs = elapsedHrTime[0] * 1000 + elapsedHrTime[1] / 1e6
res._responseTimeMs = elapsedTimeInMs
// truncate the logged URL if it is too long (without mutating req.url)
let url = req.url
if (url.length > 100) {
url = url.substring(0, 100) + "..."
}
console.log(`${req.method} ${res._status_code ?? res.statusCode ?? 200} ${url} ${elapsedTimeInMs}ms`)
})
next()
}

View File

@ -0,0 +1,45 @@
import fs from "fs"
function createRoutesFromDirectory(startFrom, directoryPath, router) {
const files = fs.readdirSync(directoryPath)
files.forEach((file) => {
const filePath = `${directoryPath}/${file}`
const stat = fs.statSync(filePath)
if (stat.isDirectory()) {
createRoutesFromDirectory(startFrom, filePath, router)
} else if (file.endsWith(".js") || file.endsWith(".jsx") || file.endsWith(".ts") || file.endsWith(".tsx")) {
let splitedFilePath = filePath.split("/")
// slice the startFrom path
splitedFilePath = splitedFilePath.slice(splitedFilePath.indexOf(startFrom) + 1)
const method = splitedFilePath[0]
let route = splitedFilePath.slice(1, splitedFilePath.length).join("/")
route = route.replace(".jsx", "")
route = route.replace(".js", "")
route = route.replace(".ts", "")
route = route.replace(".tsx", "")
if (route === "index") {
route = "/"
} else {
route = `/${route}`
}
let handler = require(filePath)
handler = handler.default || handler
router[method](route, handler)
}
})
return router
}
export default createRoutesFromDirectory
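For illustration, with startFrom = "routes" the first directory segment after it becomes the HTTP verb and the rest becomes the path (file names hypothetical):

// sketch of the directory-to-route mapping:
// routes/get/index.js  -> router.get("/", handler)
// routes/get/status.js -> router.get("/status", handler)
// routes/post/chunk.js -> router.post("/chunk", handler)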

View File

@ -0,0 +1,46 @@
import fs from "node:fs"
import path from "node:path"
export default async (middlewares, middlewaresPath) => {
if (typeof middlewaresPath === "undefined") {
middlewaresPath = path.resolve(globalThis["__root"], "middlewares")
}
if (!fs.existsSync(middlewaresPath)) {
return undefined
}
if (typeof middlewares === "string") {
middlewares = [middlewares]
}
let fns = []
for await (const middlewareName of middlewares) {
const middlewarePath = path.resolve(middlewaresPath, middlewareName)
if (!fs.existsSync(middlewarePath)) {
console.error(`Middleware ${middlewareName} not found.`)
continue
}
const middleware = require(middlewarePath).default
if (!middleware) {
console.error(`Middleware ${middlewareName} has no valid default export.`)
continue
}
if (typeof middleware !== "function") {
console.error(`Middleware ${middlewareName} is not a function.`)
continue
}
fns.push(middleware)
}
return fns
}

View File

@ -0,0 +1,18 @@
import resolveUrl from "@utils/resolveUrl"
export default (code, rootURL) => {
const importRegex = /import\s+(?:(?:([\w*\s{},]*)\s+from\s+)?["']([^"']*)["']|["']([^"']*)["'])/g
// replaces all imports with absolute paths
const absoluteImportCode = code.replace(importRegex, (match, p1, p2) => {
let resolved = resolveUrl(rootURL, p2)
if (!p1) {
return `import "${resolved}"`
}
return `import ${p1} from "${resolved}"`
})
return absoluteImportCode
}
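An illustrative call to the default export (the import name and URLs are made up):

// sketch: rewriting relative imports against a remote root
import replaceImports from "@utils/replaceImports" // hypothetical alias

const code = `import createClient from "./client.js"`
replaceImports(code, "https://cdn.example.com/pkg/")
// -> import createClient from "https://cdn.example.com/pkg/client.js"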

View File

@ -0,0 +1,20 @@
export default (from, to) => {
const resolvedUrl = new URL(to, new URL(from, "resolve://"))
if (resolvedUrl.protocol === "resolve:") {
let { pathname, search, hash } = resolvedUrl
if (to.includes("@")) {
const fromUrl = new URL(from)
const toUrl = new URL(to, fromUrl.origin)
pathname = toUrl.pathname
search = toUrl.search
hash = toUrl.hash
}
return pathname + search + hash
}
return resolvedUrl.toString()
}
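Two illustrative cases: an absolute base resolves normally, while a relative base keeps the dummy resolve: protocol so only the path portion is returned:

// sketch:
resolveUrl("https://example.com/a/b.js", "./c.js")
// -> "https://example.com/a/c.js"

resolveUrl("/a/b.js", "./c.js")
// -> "/a/c.js"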

View File

@ -0,0 +1,23 @@
import fs from "fs"
import path from "path"
async function syncFolder(dir, destPath) {
const files = await fs.promises.readdir(dir)
for await (const file of files) {
const filePath = path.resolve(dir, file)
const destinationFilePath = `${destPath}/${file}`
const stat = fs.statSync(filePath)
if (stat.isDirectory()) {
await syncFolder(filePath, destinationFilePath)
} else {
const fileContent = await fs.promises.readFile(filePath)
await global.storage.putObject(process.env.S3_BUCKET, destinationFilePath, fileContent)
}
}
}
export default syncFolder
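A usage sketch (paths illustrative; assumes global.storage has been initialized):

// sketch: mirror ./public/** into s3://<S3_BUCKET>/static/**
import syncFolder from "@utils/syncFolder" // hypothetical alias

await syncFolder("./public", "static")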