diff --git a/packages/file_server/src/classes/CacheService/index.js b/packages/file_server/src/classes/CacheService/index.js
deleted file mode 100644
index 429b39e3..00000000
--- a/packages/file_server/src/classes/CacheService/index.js
+++ /dev/null
@@ -1,72 +0,0 @@
-import fs from "fs"
-import path from "path"
-
-export default class CacheService {
-    static deletionInterval = 1000 * 60 * 5
-
-    static cachePath = path.join(process.cwd(), ".cache")
-
-    constructor(params = {}) {
-        this.params = params
-
-        if (!fs.existsSync(CacheService.cachePath)) {
-            fs.mkdirSync(CacheService.cachePath, { recursive: true })
-        }
-    }
-
-    intervalMaps = new Map()
-
-    checkDeletionFilepath(filepath) {
-        try {
-            const stats = fs.statSync(filepath)
-
-            stats.atime = new Date(stats.atime)
-
-            if (stats.atime.getTime() + CacheService.deletionInterval < Date.now()) {
-                fs.promises.unlink(filepath)
-            } else {
-                return false
-            }
-
-            return true
-
-        } catch (error) {
-            console.error(error)
-
-            fs.promises.unlink(filepath)
-
-            return true
-        }
-    }
-
-    appendToDeletion(filepath) {
-        // create a interval of 5 minutes to delete the file
-        // check the last time the file was accessed and if it was accessed in the last 5 minutes
-        // reset the interval until the file is not accessed for 5 minutes and then delete it
-        try {
-            const createInterval = () => {
-                let interval = setInterval(async () => {
-                    try {
-                        await this.checkDeletionFilepath(filepath)
-
-                        this.intervalMaps.delete(filepath)
-
-                        if (!results) {
-                            this.appendToDeletion(filepath)
-                        }
-                    } catch (error) {
-                        return clearInterval(interval)
-                    }
-                })
-
-                return interval
-            }
-
-            this.intervalMaps.set(filepath, createInterval())
-        } catch (error) {
-            console.error(error)
-
-            return fs.promises.unlink(filepath)
-        }
-    }
-}
\ No newline at end of file
diff --git a/packages/file_server/src/classes/ComtyClient/index.js b/packages/file_server/src/classes/ComtyClient/index.js
deleted file mode 100644
index 0a8fbfdf..00000000
--- a/packages/file_server/src/classes/ComtyClient/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-import createClient from "comty.js"
-
-export default (params = {}) => {
-    return createClient({
-        ...params,
-        accessKey: process.env.COMTY_ACCESS_KEY,
-        privateKey: process.env.COMTY_PRIVATE_KEY,
-    })
-}
\ No newline at end of file
diff --git a/packages/file_server/src/classes/DbManager/index.js b/packages/file_server/src/classes/DbManager/index.js
deleted file mode 100755
index bdffe441..00000000
--- a/packages/file_server/src/classes/DbManager/index.js
+++ /dev/null
@@ -1,58 +0,0 @@
-import mongoose from "mongoose"
-
-function getConnectionConfig(obj) {
-    const { DB_USER, DB_DRIVER, DB_NAME, DB_PWD, DB_HOSTNAME, DB_PORT } = obj
-
-    let auth = [
-        DB_DRIVER ?? "mongodb",
-        "://",
-    ]
-
-    if (DB_USER && DB_PWD) {
-        auth.push(`${DB_USER}:${DB_PWD}@`)
-    }
-
-    auth.push(DB_HOSTNAME ?? "localhost")
-    auth.push(`:${DB_PORT ?? "27017"}`)
-
-    if (DB_USER) {
-        auth.push("/?authMechanism=DEFAULT")
-    }
-
-    auth = auth.join("")
-
-    return [
-        auth,
-        {
-            dbName: DB_NAME,
-            useNewUrlParser: true,
-            useUnifiedTopology: true,
-        }
-    ]
-}
-
-export default class DBManager {
-    initialize = async (config) => {
-        console.log("🔌 Connecting to DB...")
-
-        const dbConfig = getConnectionConfig(config ?? process.env)
-
-        mongoose.set("strictQuery", false)
-
-        const connection = await mongoose.connect(...dbConfig)
-            .catch((err) => {
-                console.log(`❌ Failed to connect to DB, retrying...\n`)
-                console.log(error)
-
-                // setTimeout(() => {
-                //     this.initialize()
-                // }, 1000)
-
-                return false
-            })
-
-        if (connection) {
-            console.log(`✅ Connected to DB.`)
-        }
-    }
-}
\ No newline at end of file
diff --git a/packages/file_server/src/classes/Errors/index.js b/packages/file_server/src/classes/Errors/index.js
deleted file mode 100644
index bc95a60e..00000000
--- a/packages/file_server/src/classes/Errors/index.js
+++ /dev/null
@@ -1,84 +0,0 @@
-export class AuthorizationError extends Error {
-    constructor(req, res, message = "This endpoint requires authorization") {
-        super(message)
-        this.name = "AuthorizationError"
-
-        if (req && res) {
-            return res.status(this.constructor.statusCode).json({
-                error: message,
-            })
-        }
-    }
-
-    static get statusCode() {
-        return 401
-    }
-}
-
-export class NotFoundError extends Error {
-    constructor(req, res, message = "Not found") {
-        super(message)
-        this.name = "NotFoundError"
-
-        if (req && res) {
-            return res.status(this.constructor.statusCode).json({
-                error: message,
-            })
-        }
-    }
-
-    static get statusCode() {
-        return 404
-    }
-}
-
-export class PermissionError extends Error {
-    constructor(req, res, message = "You don't have permission to do this") {
-        super(message)
-        this.name = "PermissionError"
-
-        if (req && res) {
-            return res.status(this.constructor.statusCode).json({
-                error: message,
-            })
-        }
-    }
-
-    static get statusCode() {
-        return 403
-    }
-}
-
-export class BadRequestError extends Error {
-    constructor(req, res, message = "Bad request") {
-        super(message)
-        this.name = "BadRequestError"
-
-        if (req && res) {
-            return res.status(this.constructor.statusCode).json({
-                error: message,
-            })
-        }
-    }
-
-    static get statusCode() {
-        return 400
-    }
-}
-
-export class InternalServerError extends Error {
-    constructor(req, res, message = "Internal server error") {
-        super(message)
-        this.name = "InternalServerError"
-
-        if (req && res) {
-            return res.status(this.constructor.statusCode).json({
-                error: message,
-            })
-        }
-    }
-
-    static get statusCode() {
-        return 500
-    }
-}
\ No newline at end of file
diff --git a/packages/file_server/src/classes/FileUpload/index.js b/packages/file_server/src/classes/FileUpload/index.js
deleted file mode 100644
index 5780ac51..00000000
--- a/packages/file_server/src/classes/FileUpload/index.js
+++ /dev/null
@@ -1,253 +0,0 @@
-// Orginal forked from: Buzut/huge-uploader-nodejs
-// Copyright (c) 2018, Quentin Busuttil All rights reserved.
-
-import fs from "node:fs"
-import path from "node:path"
-import { promisify } from "node:util"
-import mimetypes from "mime-types"
-import crypto from "node:crypto"
-
-import Busboy from "busboy"
-
-export function getFileHash(file) {
-    return new Promise((resolve, reject) => {
-        const hash = crypto.createHash("sha256")
-
-        file.on("data", (chunk) => hash.update(chunk))
-
-        file.on("end", () => resolve(hash.digest("hex")))
-
-        file.on("error", reject)
-    })
-}
-
-export function checkHeaders(headers) {
-    if (
-        !headers["uploader-chunk-number"] ||
-        !headers["uploader-chunks-total"] ||
-        !headers["uploader-original-name"] ||
-        !headers["uploader-file-id"] ||
-        !headers["uploader-chunks-total"].match(/^[0-9]+$/) ||
-        !headers["uploader-chunk-number"].match(/^[0-9]+$/)
-    ) {
-        return false
-    }
-
-    return true
-}
-
-export function checkTotalSize(maxFileSize, maxChunkSize, totalChunks) {
-    if (maxChunkSize * totalChunks > maxFileSize) {
-        return false
-    }
-
-    return true
-}
-
-export function cleanChunks(dirPath) {
-    fs.readdir(dirPath, (err, files) => {
-        let filesLength = files.length
-
-        files.forEach((file) => {
-            fs.unlink(path.join(dirPath, file), () => {
-                if (--filesLength === 0) fs.rmdir(dirPath, () => { }) // cb does nothing but required
-            })
-        })
-    })
-}
-
-export function createAssembleChunksPromise({
-    tmpDir,
-    headers,
-}) {
-    const asyncReadFile = promisify(fs.readFile)
-    const asyncAppendFile = promisify(fs.appendFile)
-
-    const originalMimeType = mimetypes.lookup(headers["uploader-original-name"])
-    const originalExtension = mimetypes.extension(originalMimeType)
-
-    const totalChunks = +headers["uploader-chunks-total"]
-
-    const fileId = headers["uploader-file-id"]
-    const workPath = path.join(tmpDir, fileId)
-    const chunksPath = path.resolve(workPath, "chunks")
-    const assembledFilepath = path.join(workPath, `assembled.${originalExtension}`)
-
-    let chunkCount = 0
-    let finalFilepath = null
-
-    return () => {
-        return new Promise((resolve, reject) => {
-            const onEnd = async () => {
-                try {
-                    const hash = await getFileHash(fs.createReadStream(assembledFilepath))
-
-                    finalFilepath = path.resolve(workPath, `${hash}_${Date.now()}.${originalExtension}`)
-
-                    fs.renameSync(assembledFilepath, finalFilepath)
-
-                    cleanChunks(chunksPath)
-
-                    return resolve({
-                        filename: headers["uploader-original-name"],
-                        filepath: finalFilepath,
-                        cachePath: workPath,
-                        hash,
-                        mimetype: originalMimeType,
-                        extension: originalExtension,
-                    })
-                } catch (error) {
-                    return reject(error)
-                }
-            }
-
-            const pipeChunk = () => {
-                asyncReadFile(path.join(chunksPath, chunkCount.toString()))
-                    .then((chunk) => asyncAppendFile(assembledFilepath, chunk))
-                    .then(() => {
-                        // 0 indexed files = length - 1, so increment before comparison
-                        if (totalChunks > ++chunkCount) {
-                            return pipeChunk(chunkCount)
-                        }
-
-                        return onEnd()
-                    })
-                    .catch(reject)
-            }
-
-            pipeChunk()
-        })
-    }
-}
-
-export function mkdirIfDoesntExist(dirPath, callback) {
-    if (!fs.existsSync(dirPath)) {
-        fs.mkdir(dirPath, { recursive: true }, callback)
-    }
-}
-
-export function handleFile(tmpDir, headers, fileStream) {
-    const dirPath = path.join(tmpDir, headers["uploader-file-id"])
-    const chunksPath = path.join(dirPath, "chunks")
-    const chunkPath = path.join(chunksPath, headers["uploader-chunk-number"])
-    const chunkCount = +headers["uploader-chunk-number"]
-    const totalChunks = +headers["uploader-chunks-total"]
-
-    let error
-    let assembleChunksPromise
-    let finished = false
-    let writeStream
-
-    const writeFile = () => {
-        writeStream = fs.createWriteStream(chunkPath)
-
-        writeStream.on("error", (err) => {
-            error = err
-            fileStream.resume()
-        })
-
-        writeStream.on("close", () => {
-            finished = true
-
-            // if all is uploaded
-            if (chunkCount === totalChunks - 1) {
-                assembleChunksPromise = createAssembleChunksPromise({
-                    tmpDir,
-                    headers,
-                })
-            }
-        })
-
-        fileStream.pipe(writeStream)
-    }
-
-    // make sure chunk is in range
-    if (chunkCount < 0 || chunkCount >= totalChunks) {
-        error = new Error("Chunk is out of range")
-        fileStream.resume()
-    }
-
-    else if (chunkCount === 0) {
-        // create file upload dir if it's first chunk
-        mkdirIfDoesntExist(chunksPath, (err) => {
-            if (err) {
-                error = err
-                fileStream.resume()
-            }
-
-            else writeFile()
-        })
-    }
-
-    else {
-        // make sure dir exists if it's not first chunk
-        fs.stat(dirPath, (err) => {
-            if (err) {
-                error = new Error("Upload has expired")
-                fileStream.resume()
-            }
-
-            else writeFile()
-        })
-    }
-
-    return (callback) => {
-        if (finished && !error) callback(null, assembleChunksPromise)
-        else if (error) callback(error)
-
-        else {
-            writeStream.on("error", callback)
-            writeStream.on("close", () => callback(null, assembleChunksPromise))
-        }
-    }
-}
-
-export function uploadFile(req, tmpDir, maxFileSize, maxChunkSize) {
-    return new Promise((resolve, reject) => {
-        if (!checkHeaders(req.headers)) {
-            reject(new Error("Missing header(s)"))
-            return
-        }
-
-        if (!checkTotalSize(maxFileSize, req.headers["uploader-chunks-total"])) {
-            reject(new Error("File is above size limit"))
-            return
-        }
-
-        try {
-            let limitReached = false
-            let getFileStatus
-
-            const busboy = Busboy({ headers: req.headers, limits: { files: 1, fileSize: maxChunkSize * 1000 * 1000 } })
-
-            busboy.on("file", (fieldname, fileStream) => {
-                fileStream.on("limit", () => {
-                    limitReached = true
-                    fileStream.resume()
-                })
-
-                getFileStatus = handleFile(tmpDir, req.headers, fileStream)
-            })
-
-            busboy.on("close", () => {
-                if (limitReached) {
-                    reject(new Error("Chunk is above size limit"))
-                    return
-                }
-
-                getFileStatus((fileErr, assembleChunksF) => {
-                    if (fileErr) reject(fileErr)
-                    else resolve(assembleChunksF)
-                })
-            })
-
-            req.pipe(busboy)
-        }
-
-        catch (err) {
-            reject(err)
-        }
-    })
-}
-
-export default uploadFile
\ No newline at end of file
diff --git a/packages/file_server/src/classes/RedisClient/index.js b/packages/file_server/src/classes/RedisClient/index.js
deleted file mode 100644
index c46eff3f..00000000
--- a/packages/file_server/src/classes/RedisClient/index.js
+++ /dev/null
@@ -1,44 +0,0 @@
-import { createClient } from "redis"
-
-function composeURL() {
-    // support for auth
-    let url = "redis://"
-
-    if (process.env.REDIS_PASSWORD && process.env.REDIS_USERNAME) {
-        url += process.env.REDIS_USERNAME + ":" + process.env.REDIS_PASSWORD + "@"
-    }
-
-    url += process.env.REDIS_HOST ?? "localhost"
-
-    if (process.env.REDIS_PORT) {
-        url += ":" + process.env.REDIS_PORT
-    }
-
-    return url
-}
-
-export default () => {
-    let client = createClient({
-        url: composeURL(),
-    })
-
-    client.initialize = async () => {
-        console.log("🔌 Connecting to Redis client...")
-
-        await client.connect()
-
-        return client
-    }
-
-    // handle when client disconnects unexpectedly to avoid main crash
-    client.on("error", (error) => {
-        console.error("❌ Redis client error:", error)
-    })
-
-    // handle when client connects
-    client.on("connect", () => {
-        console.log("✅ Redis client connected.")
-    })
-
-    return client
-}
\ No newline at end of file
diff --git a/packages/file_server/src/classes/StorageClient/index.js b/packages/file_server/src/classes/StorageClient/index.js
deleted file mode 100755
index 8e222366..00000000
--- a/packages/file_server/src/classes/StorageClient/index.js
+++ /dev/null
@@ -1,97 +0,0 @@
-const Minio = require("minio")
-import path from "path"
-
-export const generateDefaultBucketPolicy = (payload) => {
-    const { bucketName } = payload
-
-    if (!bucketName) {
-        throw new Error("bucketName is required")
-    }
-
-    return {
-        Version: "2012-10-17",
-        Statement: [
-            {
-                Action: [
-                    "s3:GetObject"
-                ],
-                Effect: "Allow",
-                Principal: {
-                    AWS: [
-                        "*"
-                    ]
-                },
-                Resource: [
-                    `arn:aws:s3:::${bucketName}/*`
-                ],
-                Sid: ""
-            }
-        ]
-    }
-}
-
-export class StorageClient extends Minio.Client {
-    constructor(options) {
-        super(options)
-
-        this.defaultBucket = String(options.defaultBucket)
-        this.defaultRegion = String(options.defaultRegion)
-    }
-
-    composeRemoteURL = (key) => {
-        const _path = path.join(this.defaultBucket, key)
-
-        return `${this.protocol}//${this.host}:${this.port}/${_path}`
-    }
-
-    setDefaultBucketPolicy = async (bucketName) => {
-        const policy = generateDefaultBucketPolicy({ bucketName })
-
-        return this.setBucketPolicy(bucketName, JSON.stringify(policy))
-    }
-
-    initialize = async () => {
-        console.log("🔌 Checking if storage client have default bucket...")
-
-        // check connection with s3
-        const bucketExists = await this.bucketExists(this.defaultBucket).catch(() => {
-            return false
-        })
-
-        if (!bucketExists) {
-            console.warn("🪣 Default bucket not exists! Creating new bucket...")
-
-            await this.makeBucket(this.defaultBucket, "s3")
-
-            // set default bucket policy
-            await this.setDefaultBucketPolicy(this.defaultBucket)
-        }
-
-        // check if default bucket policy exists
-        const bucketPolicy = await this.getBucketPolicy(this.defaultBucket).catch(() => {
-            return null
-        })
-
-        if (!bucketPolicy) {
-            // set default bucket policy
-            await this.setDefaultBucketPolicy(this.defaultBucket)
-        }
-
-        console.log("✅ Storage client is ready.")
-    }
-}
-
-export const createStorageClientInstance = (options) => {
-    return new StorageClient({
-        ...options,
-        endPoint: process.env.S3_ENDPOINT,
-        port: Number(process.env.S3_PORT),
-        useSSL: toBoolean(process.env.S3_USE_SSL),
-        accessKey: process.env.S3_ACCESS_KEY,
-        secretKey: process.env.S3_SECRET_KEY,
-        defaultBucket: process.env.S3_BUCKET,
-        defaultRegion: process.env.S3_REGION,
-    })
-}
-
-export default createStorageClientInstance
\ No newline at end of file
diff --git a/packages/marketplace_server/src/classes/ComtyClient/index.js b/packages/marketplace_server/src/classes/ComtyClient/index.js
deleted file mode 100644
index 0a8fbfdf..00000000
--- a/packages/marketplace_server/src/classes/ComtyClient/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-import createClient from "comty.js"
-
-export default (params = {}) => {
-    return createClient({
-        ...params,
-        accessKey: process.env.COMTY_ACCESS_KEY,
-        privateKey: process.env.COMTY_PRIVATE_KEY,
-    })
-}
\ No newline at end of file
diff --git a/packages/marketplace_server/src/classes/DbManager/index.js b/packages/marketplace_server/src/classes/DbManager/index.js
deleted file mode 100755
index bdffe441..00000000
--- a/packages/marketplace_server/src/classes/DbManager/index.js
+++ /dev/null
@@ -1,58 +0,0 @@
-import mongoose from "mongoose"
-
-function getConnectionConfig(obj) {
-    const { DB_USER, DB_DRIVER, DB_NAME, DB_PWD, DB_HOSTNAME, DB_PORT } = obj
-
-    let auth = [
-        DB_DRIVER ?? "mongodb",
-        "://",
-    ]
-
-    if (DB_USER && DB_PWD) {
-        auth.push(`${DB_USER}:${DB_PWD}@`)
-    }
-
-    auth.push(DB_HOSTNAME ?? "localhost")
-    auth.push(`:${DB_PORT ?? "27017"}`)
-
-    if (DB_USER) {
-        auth.push("/?authMechanism=DEFAULT")
-    }
-
-    auth = auth.join("")
-
-    return [
-        auth,
-        {
-            dbName: DB_NAME,
-            useNewUrlParser: true,
-            useUnifiedTopology: true,
-        }
-    ]
-}
-
-export default class DBManager {
-    initialize = async (config) => {
-        console.log("🔌 Connecting to DB...")
-
-        const dbConfig = getConnectionConfig(config ?? process.env)
-
-        mongoose.set("strictQuery", false)
-
-        const connection = await mongoose.connect(...dbConfig)
-            .catch((err) => {
-                console.log(`❌ Failed to connect to DB, retrying...\n`)
-                console.log(error)
-
-                // setTimeout(() => {
-                //     this.initialize()
-                // }, 1000)
-
-                return false
-            })
-
-        if (connection) {
-            console.log(`✅ Connected to DB.`)
-        }
-    }
-}
\ No newline at end of file
diff --git a/packages/marketplace_server/src/classes/RedisClient/index.js b/packages/marketplace_server/src/classes/RedisClient/index.js
deleted file mode 100644
index c46eff3f..00000000
--- a/packages/marketplace_server/src/classes/RedisClient/index.js
+++ /dev/null
@@ -1,44 +0,0 @@
-import { createClient } from "redis"
-
-function composeURL() {
-    // support for auth
-    let url = "redis://"
-
-    if (process.env.REDIS_PASSWORD && process.env.REDIS_USERNAME) {
-        url += process.env.REDIS_USERNAME + ":" + process.env.REDIS_PASSWORD + "@"
-    }
-
-    url += process.env.REDIS_HOST ?? "localhost"
-
-    if (process.env.REDIS_PORT) {
-        url += ":" + process.env.REDIS_PORT
-    }
-
-    return url
-}
-
-export default () => {
-    let client = createClient({
-        url: composeURL(),
-    })
-
-    client.initialize = async () => {
-        console.log("🔌 Connecting to Redis client...")
-
-        await client.connect()
-
-        return client
-    }
-
-    // handle when client disconnects unexpectedly to avoid main crash
-    client.on("error", (error) => {
-        console.error("❌ Redis client error:", error)
-    })
-
-    // handle when client connects
-    client.on("connect", () => {
-        console.log("✅ Redis client connected.")
-    })
-
-    return client
-}
\ No newline at end of file
diff --git a/packages/marketplace_server/src/classes/StorageClient/index.js b/packages/marketplace_server/src/classes/StorageClient/index.js
deleted file mode 100755
index 8e222366..00000000
--- a/packages/marketplace_server/src/classes/StorageClient/index.js
+++ /dev/null
@@ -1,97 +0,0 @@
-const Minio = require("minio")
-import path from "path"
-
-export const generateDefaultBucketPolicy = (payload) => {
-    const { bucketName } = payload
-
-    if (!bucketName) {
-        throw new Error("bucketName is required")
-    }
-
-    return {
-        Version: "2012-10-17",
-        Statement: [
-            {
-                Action: [
-                    "s3:GetObject"
-                ],
-                Effect: "Allow",
-                Principal: {
-                    AWS: [
-                        "*"
-                    ]
-                },
-                Resource: [
-                    `arn:aws:s3:::${bucketName}/*`
-                ],
-                Sid: ""
-            }
-        ]
-    }
-}
-
-export class StorageClient extends Minio.Client {
-    constructor(options) {
-        super(options)
-
-        this.defaultBucket = String(options.defaultBucket)
-        this.defaultRegion = String(options.defaultRegion)
-    }
-
-    composeRemoteURL = (key) => {
-        const _path = path.join(this.defaultBucket, key)
-
-        return `${this.protocol}//${this.host}:${this.port}/${_path}`
-    }
-
-    setDefaultBucketPolicy = async (bucketName) => {
-        const policy = generateDefaultBucketPolicy({ bucketName })
-
-        return this.setBucketPolicy(bucketName, JSON.stringify(policy))
-    }
-
-    initialize = async () => {
-        console.log("🔌 Checking if storage client have default bucket...")
-
-        // check connection with s3
-        const bucketExists = await this.bucketExists(this.defaultBucket).catch(() => {
-            return false
-        })
-
-        if (!bucketExists) {
-            console.warn("🪣 Default bucket not exists! Creating new bucket...")
-
-            await this.makeBucket(this.defaultBucket, "s3")
-
-            // set default bucket policy
-            await this.setDefaultBucketPolicy(this.defaultBucket)
-        }
-
-        // check if default bucket policy exists
-        const bucketPolicy = await this.getBucketPolicy(this.defaultBucket).catch(() => {
-            return null
-        })
-
-        if (!bucketPolicy) {
-            // set default bucket policy
-            await this.setDefaultBucketPolicy(this.defaultBucket)
-        }
-
-        console.log("✅ Storage client is ready.")
-    }
-}
-
-export const createStorageClientInstance = (options) => {
-    return new StorageClient({
-        ...options,
-        endPoint: process.env.S3_ENDPOINT,
-        port: Number(process.env.S3_PORT),
-        useSSL: toBoolean(process.env.S3_USE_SSL),
-        accessKey: process.env.S3_ACCESS_KEY,
-        secretKey: process.env.S3_SECRET_KEY,
-        defaultBucket: process.env.S3_BUCKET,
-        defaultRegion: process.env.S3_REGION,
-    })
-}
-
-export default createStorageClientInstance
\ No newline at end of file
diff --git a/packages/music_server/src/classes/ComtyClient/index.js b/packages/music_server/src/classes/ComtyClient/index.js
deleted file mode 100644
index 0a8fbfdf..00000000
--- a/packages/music_server/src/classes/ComtyClient/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-import createClient from "comty.js"
-
-export default (params = {}) => {
-    return createClient({
-        ...params,
-        accessKey: process.env.COMTY_ACCESS_KEY,
-        privateKey: process.env.COMTY_PRIVATE_KEY,
-    })
-}
\ No newline at end of file
diff --git a/packages/music_server/src/classes/DbManager/index.js b/packages/music_server/src/classes/DbManager/index.js
deleted file mode 100755
index bdffe441..00000000
--- a/packages/music_server/src/classes/DbManager/index.js
+++ /dev/null
@@ -1,58 +0,0 @@
-import mongoose from "mongoose"
-
-function getConnectionConfig(obj) {
-    const { DB_USER, DB_DRIVER, DB_NAME, DB_PWD, DB_HOSTNAME, DB_PORT } = obj
-
-    let auth = [
-        DB_DRIVER ?? "mongodb",
-        "://",
-    ]
-
-    if (DB_USER && DB_PWD) {
-        auth.push(`${DB_USER}:${DB_PWD}@`)
-    }
-
-    auth.push(DB_HOSTNAME ?? "localhost")
-    auth.push(`:${DB_PORT ?? "27017"}`)
-
-    if (DB_USER) {
-        auth.push("/?authMechanism=DEFAULT")
-    }
-
-    auth = auth.join("")
-
-    return [
-        auth,
-        {
-            dbName: DB_NAME,
-            useNewUrlParser: true,
-            useUnifiedTopology: true,
-        }
-    ]
-}
-
-export default class DBManager {
-    initialize = async (config) => {
-        console.log("🔌 Connecting to DB...")
-
-        const dbConfig = getConnectionConfig(config ?? process.env)
-
-        mongoose.set("strictQuery", false)
-
-        const connection = await mongoose.connect(...dbConfig)
-            .catch((err) => {
-                console.log(`❌ Failed to connect to DB, retrying...\n`)
-                console.log(error)
-
-                // setTimeout(() => {
-                //     this.initialize()
-                // }, 1000)
-
-                return false
-            })
-
-        if (connection) {
-            console.log(`✅ Connected to DB.`)
-        }
-    }
-}
\ No newline at end of file
diff --git a/packages/music_server/src/classes/Errors/index.js b/packages/music_server/src/classes/Errors/index.js
deleted file mode 100644
index bc95a60e..00000000
--- a/packages/music_server/src/classes/Errors/index.js
+++ /dev/null
@@ -1,84 +0,0 @@
-export class AuthorizationError extends Error {
-    constructor(req, res, message = "This endpoint requires authorization") {
-        super(message)
-        this.name = "AuthorizationError"
-
-        if (req && res) {
-            return res.status(this.constructor.statusCode).json({
-                error: message,
-            })
-        }
-    }
-
-    static get statusCode() {
-        return 401
-    }
-}
-
-export class NotFoundError extends Error {
-    constructor(req, res, message = "Not found") {
-        super(message)
-        this.name = "NotFoundError"
-
-        if (req && res) {
-            return res.status(this.constructor.statusCode).json({
-                error: message,
-            })
-        }
-    }
-
-    static get statusCode() {
-        return 404
-    }
-}
-
-export class PermissionError extends Error {
-    constructor(req, res, message = "You don't have permission to do this") {
-        super(message)
-        this.name = "PermissionError"
-
-        if (req && res) {
-            return res.status(this.constructor.statusCode).json({
-                error: message,
-            })
-        }
-    }
-
-    static get statusCode() {
-        return 403
-    }
-}
-
-export class BadRequestError extends Error {
-    constructor(req, res, message = "Bad request") {
-        super(message)
-        this.name = "BadRequestError"
-
-        if (req && res) {
-            return res.status(this.constructor.statusCode).json({
-                error: message,
-            })
-        }
-    }
-
-    static get statusCode() {
-        return 400
-    }
-}
-
-export class InternalServerError extends Error {
-    constructor(req, res, message = "Internal server error") {
-        super(message)
-        this.name = "InternalServerError"
-
-        if (req && res) {
-            return res.status(this.constructor.statusCode).json({
-                error: message,
-            })
-        }
-    }
-
-    static get statusCode() {
-        return 500
-    }
-}
\ No newline at end of file
diff --git a/packages/music_server/src/classes/RedisClient/index.js b/packages/music_server/src/classes/RedisClient/index.js
deleted file mode 100644
index c46eff3f..00000000
--- a/packages/music_server/src/classes/RedisClient/index.js
+++ /dev/null
@@ -1,44 +0,0 @@
-import { createClient } from "redis"
-
-function composeURL() {
-    // support for auth
-    let url = "redis://"
-
-    if (process.env.REDIS_PASSWORD && process.env.REDIS_USERNAME) {
-        url += process.env.REDIS_USERNAME + ":" + process.env.REDIS_PASSWORD + "@"
-    }
-
-    url += process.env.REDIS_HOST ?? "localhost"
-
-    if (process.env.REDIS_PORT) {
-        url += ":" + process.env.REDIS_PORT
-    }
-
-    return url
-}
-
-export default () => {
-    let client = createClient({
-        url: composeURL(),
-    })
-
-    client.initialize = async () => {
-        console.log("🔌 Connecting to Redis client...")
-
-        await client.connect()
-
-        return client
-    }
-
-    // handle when client disconnects unexpectedly to avoid main crash
-    client.on("error", (error) => {
-        console.error("❌ Redis client error:", error)
-    })
-
-    // handle when client connects
-    client.on("connect", () => {
-        console.log("✅ Redis client connected.")
-    })
-
-    return client
-}
\ No newline at end of file
diff --git a/packages/music_server/src/classes/StorageClient/index.js b/packages/music_server/src/classes/StorageClient/index.js
deleted file mode 100755
index 8e222366..00000000
--- a/packages/music_server/src/classes/StorageClient/index.js
+++ /dev/null
@@ -1,97 +0,0 @@
-const Minio = require("minio")
-import path from "path"
-
-export const generateDefaultBucketPolicy = (payload) => {
-    const { bucketName } = payload
-
-    if (!bucketName) {
-        throw new Error("bucketName is required")
-    }
-
-    return {
-        Version: "2012-10-17",
-        Statement: [
-            {
-                Action: [
-                    "s3:GetObject"
-                ],
-                Effect: "Allow",
-                Principal: {
-                    AWS: [
-                        "*"
-                    ]
-                },
-                Resource: [
-                    `arn:aws:s3:::${bucketName}/*`
-                ],
-                Sid: ""
-            }
-        ]
-    }
-}
-
-export class StorageClient extends Minio.Client {
-    constructor(options) {
-        super(options)
-
-        this.defaultBucket = String(options.defaultBucket)
-        this.defaultRegion = String(options.defaultRegion)
-    }
-
-    composeRemoteURL = (key) => {
-        const _path = path.join(this.defaultBucket, key)
-
-        return `${this.protocol}//${this.host}:${this.port}/${_path}`
-    }
-
-    setDefaultBucketPolicy = async (bucketName) => {
-        const policy = generateDefaultBucketPolicy({ bucketName })
-
-        return this.setBucketPolicy(bucketName, JSON.stringify(policy))
-    }
-
-    initialize = async () => {
-        console.log("🔌 Checking if storage client have default bucket...")
-
-        // check connection with s3
-        const bucketExists = await this.bucketExists(this.defaultBucket).catch(() => {
-            return false
-        })
-
-        if (!bucketExists) {
-            console.warn("🪣 Default bucket not exists! Creating new bucket...")
-
-            await this.makeBucket(this.defaultBucket, "s3")
-
-            // set default bucket policy
-            await this.setDefaultBucketPolicy(this.defaultBucket)
-        }
-
-        // check if default bucket policy exists
-        const bucketPolicy = await this.getBucketPolicy(this.defaultBucket).catch(() => {
-            return null
-        })
-
-        if (!bucketPolicy) {
-            // set default bucket policy
-            await this.setDefaultBucketPolicy(this.defaultBucket)
-        }
-
-        console.log("✅ Storage client is ready.")
-    }
-}
-
-export const createStorageClientInstance = (options) => {
-    return new StorageClient({
-        ...options,
-        endPoint: process.env.S3_ENDPOINT,
-        port: Number(process.env.S3_PORT),
-        useSSL: toBoolean(process.env.S3_USE_SSL),
-        accessKey: process.env.S3_ACCESS_KEY,
-        secretKey: process.env.S3_SECRET_KEY,
-        defaultBucket: process.env.S3_BUCKET,
-        defaultRegion: process.env.S3_REGION,
-    })
-}
-
-export default createStorageClientInstance
\ No newline at end of file