remove shared

This commit is contained in:
SrGooglo 2024-08-26 13:51:03 +00:00
parent 5436678fed
commit 5733503bc0
36 changed files with 0 additions and 1912 deletions

View File

@ -1,72 +0,0 @@
import fs from "fs"
import path from "path"
export default class CacheService {
    // Idle time (ms) a cached file may go unread before deletion: 5 minutes.
    static deletionInterval = 1000 * 60 * 5

    static cachePath = path.join(process.cwd(), ".cache")

    /**
     * @param {Object} [params] - stored as-is; no options are read here yet.
     */
    constructor(params = {}) {
        this.params = params

        if (!fs.existsSync(CacheService.cachePath)) {
            fs.mkdirSync(CacheService.cachePath, { recursive: true })
        }
    }

    // filepath -> pending timer handle
    intervalMaps = new Map()

    /**
     * Deletes `filepath` if it has not been accessed within deletionInterval.
     * @returns {boolean} true when a deletion was started (or the file was
     * unreadable), false when the file was accessed recently and survives.
     */
    checkDeletionFilepath(filepath) {
        try {
            const stats = fs.statSync(filepath)

            stats.atime = new Date(stats.atime)

            if (stats.atime.getTime() + CacheService.deletionInterval < Date.now()) {
                fs.promises.unlink(filepath)
            } else {
                return false
            }

            return true
        } catch (error) {
            console.error(error)

            // unreadable/missing entry: attempt removal anyway
            fs.promises.unlink(filepath)

            return true
        }
    }

    /**
     * Watches `filepath` and deletes it once it has been idle for
     * deletionInterval; if it is still fresh when the timer fires, the
     * watch is re-armed for another interval.
     *
     * Fixes over the previous version:
     *  - setInterval was called without a delay, so the check ran every
     *    event-loop tick instead of every 5 minutes;
     *  - the callback read an undeclared `results` variable, which threw a
     *    ReferenceError and silently cancelled the watcher;
     *  - timers were never cleared, leaking one interval per re-schedule.
     */
    appendToDeletion(filepath) {
        try {
            const interval = setInterval(async () => {
                // one-shot: clear before doing any work, re-arm if needed
                clearInterval(interval)
                this.intervalMaps.delete(filepath)

                try {
                    const deleted = await this.checkDeletionFilepath(filepath)

                    if (!deleted) {
                        this.appendToDeletion(filepath)
                    }
                } catch (error) {
                    console.error(error)
                }
            }, CacheService.deletionInterval)

            this.intervalMaps.set(filepath, interval)
        } catch (error) {
            console.error(error)

            return fs.promises.unlink(filepath)
        }
    }
}

View File

@ -1,9 +0,0 @@
import createClient from "comty.js"
// Factory for a server-side comty.js client authenticated with the
// service credentials taken from the environment.
export default (params = {}) => {
    const credentials = {
        accessKey: process.env.COMTY_ACCESS_KEY,
        privateKey: process.env.COMTY_PRIVATE_KEY,
    }

    return createClient({ ...params, ...credentials })
}

View File

@ -1,55 +0,0 @@
import mongoose from "mongoose"
/**
 * Builds the [uri, options] pair for mongoose.connect from an env-like
 * object. Defaults: driver "mongodb", host "localhost", port "27017".
 */
function getConnectionConfig(obj) {
    const { DB_USER, DB_DRIVER, DB_NAME, DB_PWD, DB_HOSTNAME, DB_PORT, DB_AUTH_SOURCE } = obj

    const uri = `${DB_DRIVER ?? "mongodb"}://${DB_HOSTNAME ?? "localhost"}:${DB_PORT ?? "27017"}`

    const params = {
        auth: {},
        dbName: DB_NAME,
        user: DB_USER,
        pass: DB_PWD,
        useNewUrlParser: true,
        useUnifiedTopology: true,
    }

    if (DB_AUTH_SOURCE) {
        params.auth.authSource = DB_AUTH_SOURCE
    }

    return [uri, params]
}
export default class DBManager {
    /**
     * Opens the mongoose connection using `config` (defaults to process.env).
     * Failures are logged, not thrown — the caller sees no error either way.
     */
    initialize = async (config) => {
        console.log(`🔌 Connecting to DB [${process.env.DB_HOSTNAME}]...`)

        const [uri, options] = getConnectionConfig(config ?? process.env)

        mongoose.set("strictQuery", false)

        try {
            await mongoose.connect(uri, options)
        } catch (err) {
            console.log(`❌ Failed to connect to DB, retrying...\n`)
            console.log(err)
            return
        }

        console.log(`✅ Connected to DB.`)
    }
}

View File

@ -1,37 +0,0 @@
// Model descriptor: NFC tags that users bind to an action/URL.
const NFCTagModel = {
    name: "NFCTag",
    collection: "nfc_tags",
    schema: {
        user_id: { type: String, required: true },
        owner_id: { type: String, required: true },
        serial: { type: String, required: true },
        alias: { type: String, default: "My NFC Tag" },
        active: { type: Boolean, default: true },
        // default behavior redirects scanners to the main site
        behavior: {
            type: Object,
            default: {
                type: "url",
                value: "https://comty.app",
            },
        },
        endpoint_url: { type: String, default: "https://comty.app/nfc/no_endpoint" },
    },
}

export default NFCTagModel

View File

@ -1,28 +0,0 @@
// Model descriptor: server-to-server access tokens issued per client.
const AuthorizedServerTokensModel = {
    name: "authorizedServerTokens",
    collection: "authorizedServerTokens",
    schema: {
        client_id: { type: String, required: true },
        token: { type: String, required: true },
        access: { type: Array, default: [] },
        name: { type: String },
        description: { type: String },
        createdAt: { type: Date, default: Date.now },
    },
}

export default AuthorizedServerTokensModel

View File

@ -1,11 +0,0 @@
// Model descriptor: cosmetic badges that can be attached to accounts.
const BadgeModel = {
    name: "Badge",
    collection: "badges",
    schema: {
        name: {
            type: String,
            required: true,
        },
        label: {
            type: String,
        },
        description: {
            type: String,
        },
        icon: {
            type: String,
        },
        color: {
            type: String,
        },
    },
}

export default BadgeModel

View File

@ -1,10 +0,0 @@
// Model descriptor: comments attached to a parent entity (e.g. a post).
const CommentModel = {
    name: "Comment",
    collection: "comments",
    schema: {
        user_id: {
            type: String,
            required: true,
        },
        parent_id: {
            type: String,
            required: true,
        },
        message: {
            type: String,
            required: true,
        },
        created_at: {
            type: Date,
            default: Date.now,
        },
    },
}

export default CommentModel

View File

@ -1,15 +0,0 @@
// Model descriptor: generic key/value application configuration.
const ConfigModel = {
    name: "Config",
    collection: "config",
    schema: {
        key: { type: String, required: true },
        // type can be anything
        value: { type: Object, required: true },
    },
}

export default ConfigModel

View File

@ -1,13 +0,0 @@
// Model descriptor: curated events highlighted in the app.
const FeaturedEventModel = {
    name: "FeaturedEvent",
    collection: "featuredEvents",
    schema: {
        name: {
            type: String,
            required: true,
        },
        category: {
            type: String,
        },
        description: {
            type: String,
        },
        dates: {
            type: Object,
        },
        location: {
            type: String,
        },
        announcement: {
            type: Object,
            required: true,
        },
        expired: {
            type: Boolean,
            default: false,
        },
    },
}

export default FeaturedEventModel

View File

@ -1,12 +0,0 @@
// Model descriptor: editorially featured playlists.
const FeaturedPlaylistModel = {
    name: "FeaturedPlaylist",
    collection: "featuredPlaylists",
    schema: {
        title: {
            type: String,
            required: true,
        },
        description: {
            type: String,
        },
        cover_url: {
            type: String,
        },
        enabled: {
            type: Boolean,
            default: true,
        },
        genre: {
            type: String,
        },
        playlist_id: {
            type: String,
            required: true,
        },
    },
}

export default FeaturedPlaylistModel

View File

@ -1,10 +0,0 @@
// Model descriptor: rotating featured wallpapers.
const FeaturedWallpaperModel = {
    name: "FeaturedWallpaper",
    collection: "featuredWallpapers",
    schema: {
        active: {
            type: Boolean,
            default: true,
        },
        date: {
            type: Date,
            default: Date.now,
        },
        url: {
            type: String,
            required: true,
        },
        author: {
            type: String,
        },
    },
}

export default FeaturedWallpaperModel

View File

@ -1,23 +0,0 @@
import mongoose, { Schema } from "mongoose"
import fs from "fs"
import path from "path"
// Builds (or reuses) a mongoose model for every schema descriptor file in
// this directory, keyed by model name.
// NOTE(review): relies on __dirname and require(), so this file is expected
// to run under CJS or a bundler despite the ESM imports above — verify.
function generateModels() {
    const models = {}

    const definitionFiles = fs.readdirSync(__dirname).filter((file) => file !== "index.js")

    for (const file of definitionFiles) {
        const model = require(path.join(__dirname, file)).default

        if (mongoose.models[model.name]) {
            // already compiled (e.g. hot reload) — reuse the registered model
            models[model.name] = mongoose.model(model.name)
        } else {
            models[model.name] = mongoose.model(model.name, new Schema(model.schema), model.collection)
        }
    }

    return models
}

module.exports = generateModels()

View File

@ -1,41 +0,0 @@
// Model descriptor: user-created music playlists.
const PlaylistModel = {
    name: "Playlist",
    collection: "playlists",
    schema: {
        user_id: { type: String, required: true },
        title: { type: String, required: true },
        description: { type: String },
        list: { type: Object, default: [], required: true },
        cover: { type: String, default: "https://storage.ragestudio.net/comty-static-assets/default_song.png" },
        thumbnail: { type: String, default: "https://storage.ragestudio.net/comty-static-assets/default_song.png" },
        created_at: { type: Date, required: true },
        publisher: { type: Object },
        public: { type: Boolean, default: true },
    },
}

export default PlaylistModel

View File

@ -1,12 +0,0 @@
// Model descriptor: user posts on the feed.
const PostModel = {
    name: "Post",
    collection: "posts",
    schema: {
        user_id: {
            type: String,
            required: true,
        },
        // NOTE(review): `timestamp` is a String while `created_at` is a Date —
        // presumably a legacy duplicate; confirm before consolidating.
        timestamp: {
            type: String,
            required: true,
        },
        created_at: {
            type: Date,
            default: Date.now,
            required: true,
        },
        message: {
            type: String,
        },
        attachments: {
            type: Array,
            default: [],
        },
        flags: {
            type: Array,
            default: [],
        },
    },
}

export default PostModel

View File

@ -1,14 +0,0 @@
// Model descriptor: one like per (user, post) pair.
const PostLikeModel = {
    name: "PostLike",
    collection: "post_likes",
    schema: {
        user_id: { type: String, required: true },
        post_id: { type: String, required: true },
    },
}

export default PostLikeModel

View File

@ -1,14 +0,0 @@
// Model descriptor: maps an expired auth token to its refresh token.
const RegenerationTokenModel = {
    name: "RegenerationToken",
    collection: "regenerationTokens",
    schema: {
        expiredToken: { type: String, required: true },
        refreshToken: { type: String, required: true },
    },
}

export default RegenerationTokenModel

View File

@ -1,38 +0,0 @@
// Model descriptor: music releases (albums, EPs, singles).
const ReleaseModel = {
    name: "Release",
    collection: "releases",
    schema: {
        user_id: { type: String, required: true },
        title: { type: String, required: true },
        type: { type: String, required: true },
        list: { type: Object, default: [], required: true },
        cover: { type: String, default: "https://storage.ragestudio.net/comty-static-assets/default_song.png" },
        created_at: { type: Date, required: true },
        publisher: { type: Object },
        public: { type: Boolean, default: true },
    },
}

export default ReleaseModel

View File

@ -1,15 +0,0 @@
// Model descriptor: assignable account roles.
const RoleModel = {
    name: "Role",
    collection: "roles",
    schema: {
        name: { type: String },
        description: { type: String },
        apply: { type: Object },
    },
}

export default RoleModel

View File

@ -1,18 +0,0 @@
// Model descriptor: posts a user has saved.
// NOTE(review): lowercase "string"/"date" rely on mongoose's type-name
// aliases — verify they behave identically to String/Date here.
const SavedPostModel = {
    name: "SavedPost",
    collection: "savedPosts",
    schema: {
        post_id: { type: "string", required: true },
        user_id: { type: "string", required: true },
        saved_at: { type: "date", default: Date.now },
    },
}

export default SavedPostModel

View File

@ -1,24 +0,0 @@
import { Schema } from "mongoose"
// Model descriptor: tunable server-side limits keyed by name.
const ServerLimitModel = {
    name: "ServerLimit",
    collection: "serverLimits",
    schema: {
        key: { type: String, required: true },
        // Mixed: the limit value may be a number, string, object, etc.
        value: { type: Schema.Types.Mixed, required: true },
        active: { type: Boolean, default: true },
        data: { type: Object, required: false },
    },
}

export default ServerLimitModel

View File

@ -1,14 +0,0 @@
// Model descriptor: issued auth sessions and their client metadata.
const SessionModel = {
    name: "Session",
    collection: "sessions",
    schema: {
        session_uuid: {
            type: String,
            required: true,
        },
        token: {
            type: String,
            required: true,
        },
        username: {
            type: String,
            required: true,
        },
        user_id: {
            type: String,
            required: true,
        },
        date: {
            type: Number,
            default: 0,
        },
        location: {
            type: String,
            default: "Unknown",
        },
        ip_address: {
            type: String,
            default: "Unknown",
        },
        client: {
            type: String,
            default: "Unknown",
        },
    },
}

export default SessionModel

View File

@ -1,14 +0,0 @@
// Model descriptor: selectable categories for live streams.
const StreamingCategoryModel = {
    name: "StreamingCategory",
    collection: "streamingCategories",
    schema: {
        key: { type: String, required: true },
        label: { type: String, required: true },
    },
}

export default StreamingCategoryModel

View File

@ -1,37 +0,0 @@
// Model descriptor: streaming profiles, one stream key per profile.
const StreamingProfileModel = {
    name: "StreamingProfile",
    collection: "streamingProfiles",
    schema: {
        user_id: { type: String, required: true },
        profile_name: { type: String, required: true },
        // select: false keeps the key out of default query results
        stream_key: { type: String, required: true, select: false },
        info: {
            type: Object,
            default: {
                title: "Untitled",
                description: "No description",
                category: "other",
                thumbnail: null,
            },
        },
        options: {
            type: Object,
            default: {
                connection_protected: true,
                private: false,
                chatEnabled: true,
                drvEnabled: false,
            },
        },
    },
}

export default StreamingProfileModel

View File

@ -1,18 +0,0 @@
// Model descriptor: per-user synced key/value entries (used by SecureSyncEntry).
// NOTE(review): lowercase "string" relies on mongoose type-name aliases.
const SyncEntryModel = {
    name: "SyncEntry",
    collection: "syncEntries",
    schema: {
        user_id: { type: "string", required: true },
        key: { type: "string", required: true },
        value: { type: "string", required: true },
    },
}

export default SyncEntryModel

View File

@ -1,53 +0,0 @@
// Model descriptor: music tracks and their playback/display metadata.
const TrackModel = {
    name: "Track",
    collection: "tracks",
    schema: {
        title: { type: String, required: true },
        album: { type: String },
        artist: { type: String },
        source: { type: String, required: true },
        metadata: { type: Object },
        explicit: { type: Boolean, default: false },
        public: { type: Boolean, default: true },
        cover: { type: String, default: "https://storage.ragestudio.net/comty-static-assets/default_song.png" },
        thumbnail: { type: String, default: "https://storage.ragestudio.net/comty-static-assets/default_song.png" },
        videoCanvas: { type: String },
        spotifyId: { type: String },
        lyricsEnabled: { type: Boolean, default: true },
        publisher: { type: Object, required: true },
    },
}

export default TrackModel

View File

@ -1,17 +0,0 @@
// Model descriptor: one like per (user, track) pair.
const TrackLikeModel = {
    name: "TrackLike",
    collection: "tracks_likes",
    schema: {
        user_id: { type: String, required: true },
        track_id: { type: String, required: true },
        created_at: { type: Date },
    },
}

export default TrackLikeModel

View File

@ -1,21 +0,0 @@
// Model descriptor: user accounts.
const UserModel = {
    name: "User",
    collection: "accounts",
    schema: {
        username: { type: String, required: true },
        // select: false keeps the hash out of default query results
        password: { type: String, required: true, select: false },
        email: { type: String, required: true },
        description: { type: String, default: null },
        public_name: { type: String, default: null },
        fullName: { type: String, default: null },
        cover: { type: String, default: null },
        avatar: { type: String, default: null },
        roles: { type: Array, default: [] },
        verified: { type: Boolean, default: false },
        birthday: { type: Date, default: null },
        badges: { type: Array, default: [] },
        links: { type: Array, default: [] },
        // NOTE(review): both createdAt and created_at exist, and as plain
        // Strings — presumably a legacy/compat duplicate; confirm before
        // consolidating.
        createdAt: { type: String },
        created_at: { type: String },
    },
}

export default UserModel

View File

@ -1,8 +0,0 @@
// Model descriptor: follow edge from user_id to the followed account.
const UserFollowModel = {
    name: "UserFollow",
    collection: "follows",
    schema: {
        user_id: {
            type: String,
            required: true,
        },
        to: {
            type: String,
            required: true,
        },
    },
}

export default UserFollowModel

View File

@ -1,24 +0,0 @@
// Model descriptor: user-published UI widgets.
const WidgetModel = {
    name: "Widget",
    collection: "widgets",
    schema: {
        manifest: { type: Object, required: true },
        user_id: { type: String, required: true },
        public: { type: Boolean, default: true },
        created_at: { type: Date },
        updated_at: { type: Date },
    },
}

export default WidgetModel

View File

@ -1,84 +0,0 @@
/**
 * 401 error. When express `req`/`res` are supplied, the constructor
 * immediately sends the JSON error response and returns its result
 * instead of the error instance (original behavior, preserved).
 */
export class AuthorizationError extends Error {
    static get statusCode() {
        return 401
    }

    constructor(req, res, message = "This endpoint requires authorization") {
        super(message)
        this.name = "AuthorizationError"

        const shouldRespond = Boolean(req) && Boolean(res)

        if (shouldRespond) {
            return res
                .status(this.constructor.statusCode)
                .json({ error: message })
        }
    }
}
/**
 * 404 error. When express `req`/`res` are supplied, the constructor
 * immediately sends the JSON error response and returns its result.
 */
export class NotFoundError extends Error {
    static get statusCode() {
        return 404
    }

    constructor(req, res, message = "Not found") {
        super(message)
        this.name = "NotFoundError"

        const shouldRespond = Boolean(req) && Boolean(res)

        if (shouldRespond) {
            return res
                .status(this.constructor.statusCode)
                .json({ error: message })
        }
    }
}
/**
 * 403 error. When express `req`/`res` are supplied, the constructor
 * immediately sends the JSON error response and returns its result.
 */
export class PermissionError extends Error {
    static get statusCode() {
        return 403
    }

    constructor(req, res, message = "You don't have permission to do this") {
        super(message)
        this.name = "PermissionError"

        const shouldRespond = Boolean(req) && Boolean(res)

        if (shouldRespond) {
            return res
                .status(this.constructor.statusCode)
                .json({ error: message })
        }
    }
}
/**
 * 400 error. When express `req`/`res` are supplied, the constructor
 * immediately sends the JSON error response and returns its result.
 */
export class BadRequestError extends Error {
    static get statusCode() {
        return 400
    }

    constructor(req, res, message = "Bad request") {
        super(message)
        this.name = "BadRequestError"

        const shouldRespond = Boolean(req) && Boolean(res)

        if (shouldRespond) {
            return res
                .status(this.constructor.statusCode)
                .json({ error: message })
        }
    }
}
/**
 * 500 error. When express `req`/`res` are supplied, the constructor
 * immediately sends the JSON error response and returns its result.
 */
export class InternalServerError extends Error {
    static get statusCode() {
        return 500
    }

    constructor(req, res, message = "Internal server error") {
        super(message)
        this.name = "InternalServerError"

        const shouldRespond = Boolean(req) && Boolean(res)

        if (shouldRespond) {
            return res
                .status(this.constructor.statusCode)
                .json({ error: message })
        }
    }
}

View File

@ -1,260 +0,0 @@
// Original forked from: Buzut/huge-uploader-nodejs
// Copyright (c) 2018, Quentin Busuttil All rights reserved.
import fs from "node:fs"
import path from "node:path"
import { promisify } from "node:util"
import mimetypes from "mime-types"
import crypto from "node:crypto"
import Busboy from "busboy"
/**
 * Streams `file` through SHA-256 and resolves with the hex digest.
 * @param {import("stream").Readable} file - readable stream of file contents
 * @returns {Promise<string>} hex-encoded sha256 digest
 */
export function getFileHash(file) {
    return new Promise((resolve, reject) => {
        const digest = crypto.createHash("sha256")

        file.on("data", (chunk) => digest.update(chunk))
        file.on("error", reject)
        file.on("end", () => resolve(digest.digest("hex")))
    })
}
/**
 * Validates the custom chunk-upload headers: all four must be present and
 * the chunk counters must be non-negative integers in string form.
 * @returns {boolean}
 */
export function checkHeaders(headers) {
    const required = [
        "uploader-chunk-number",
        "uploader-chunks-total",
        "uploader-original-name",
        "uploader-file-id",
    ]

    if (required.some((name) => !headers[name])) {
        return false
    }

    const digitsOnly = /^[0-9]+$/

    return digitsOnly.test(headers["uploader-chunks-total"])
        && digitsOnly.test(headers["uploader-chunk-number"])
}
/**
 * Worst-case size guard: rejects when chunkSize * chunkCount could exceed
 * the configured maximum file size. The negated-comparison form is kept so
 * NaN inputs pass, exactly like the original.
 * @returns {boolean} true when the upload fits within maxFileSize
 */
export function checkTotalSize(maxFileSize, maxChunkSize, totalChunks) {
    const worstCaseSize = maxChunkSize * totalChunks

    return !(worstCaseSize > maxFileSize)
}
/**
 * Asynchronously unlinks every file in `dirPath`, then removes the
 * directory itself once the last unlink callback fires (fire-and-forget;
 * errors are ignored, matching the original behavior).
 */
export function cleanChunks(dirPath) {
    fs.readdir(dirPath, (err, files) => {
        let remaining = files.length

        for (const file of files) {
            fs.unlink(path.join(dirPath, file), () => {
                remaining -= 1

                if (remaining === 0) fs.rmdir(dirPath, () => { }) // cb does nothing but required
            })
        }
    })
}
/**
 * Builds a lazily-invoked assembler for a chunked upload.
 *
 * Returns a zero-argument function that, when called, concatenates every
 * numbered chunk file (0 .. totalChunks-1) from `<tmpDir>/<fileId>/chunks`
 * into one file, renames it to its content hash (optionally suffixed with a
 * timestamp when `useDate` is set), deletes the chunk directory, and
 * resolves with metadata about the assembled file.
 */
export function createAssembleChunksPromise({
    tmpDir,
    headers,
    useDate,
}) {
    const asyncReadFile = promisify(fs.readFile)
    const asyncAppendFile = promisify(fs.appendFile)

    // Mime type / extension are derived from the client-supplied name.
    // NOTE(review): if lookup fails it returns false and extension becomes
    // false too — filenames would contain "false"; confirm upstream validates.
    const originalMimeType = mimetypes.lookup(headers["uploader-original-name"])
    const originalExtension = mimetypes.extension(originalMimeType)

    const totalChunks = +headers["uploader-chunks-total"]
    const fileId = headers["uploader-file-id"]

    const workPath = path.join(tmpDir, fileId)
    const chunksPath = path.resolve(workPath, "chunks")
    const assembledFilepath = path.join(workPath, `assembled.${originalExtension}`)

    // Closure state shared between sequential pipeChunk calls.
    let chunkCount = 0
    let finalFilepath = null

    return () => {
        return new Promise((resolve, reject) => {
            // Runs after the last chunk is appended: hash the assembled file,
            // rename it to its hash, and clean up the chunk directory.
            const onEnd = async () => {
                try {
                    const hash = await getFileHash(fs.createReadStream(assembledFilepath))

                    if (useDate) {
                        finalFilepath = path.resolve(workPath, `${hash}_${Date.now()}.${originalExtension}`)
                    } else {
                        finalFilepath = path.resolve(workPath, `${hash}.${originalExtension}`)
                    }

                    fs.renameSync(assembledFilepath, finalFilepath)
                    cleanChunks(chunksPath)

                    return resolve({
                        filename: headers["uploader-original-name"],
                        filepath: finalFilepath,
                        cachePath: workPath,
                        hash,
                        mimetype: originalMimeType,
                        extension: originalExtension,
                    })
                } catch (error) {
                    return reject(error)
                }
            }

            // Reads chunk files (named "0", "1", ...) strictly in order and
            // appends each to the assembled file.
            const pipeChunk = () => {
                asyncReadFile(path.join(chunksPath, chunkCount.toString()))
                    .then((chunk) => asyncAppendFile(assembledFilepath, chunk))
                    .then(() => {
                        // 0 indexed files = length - 1, so increment before comparison
                        if (totalChunks > ++chunkCount) {
                            return pipeChunk(chunkCount)
                        }

                        return onEnd()
                    })
                    .catch(reject)
            }

            pipeChunk()
        })
    }
}
/**
 * Ensures `dirPath` exists, then invokes `callback(err)` exactly once.
 *
 * FIX: the previous version never called the callback when the directory
 * already existed, so callers waiting on it (handleFile's writeFile path
 * for a re-sent first chunk) would hang forever.
 */
export function mkdirIfDoesntExist(dirPath, callback) {
    if (!fs.existsSync(dirPath)) {
        fs.mkdir(dirPath, { recursive: true }, callback)
    } else {
        // Directory already present — report success asynchronously so the
        // callback contract matches the fs.mkdir branch.
        process.nextTick(() => callback(null))
    }
}
/**
 * Persists one incoming chunk stream to `<tmpDir>/<fileId>/chunks/<n>`.
 *
 * Chunk 0 creates the upload directory; later chunks require the directory
 * to already exist ("Upload has expired" otherwise). When the final chunk's
 * write stream closes, an assembler is prepared (but not run).
 *
 * Returns a status probe: call it with `(err, assembleChunksPromise) => {}`;
 * `assembleChunksPromise` is only defined after the LAST chunk has closed.
 */
export function handleFile(tmpDir, headers, fileStream) {
    const dirPath = path.join(tmpDir, headers["uploader-file-id"])
    const chunksPath = path.join(dirPath, "chunks")
    const chunkPath = path.join(chunksPath, headers["uploader-chunk-number"])

    const useDate = headers["uploader-use-date"] === "true"
    const chunkCount = +headers["uploader-chunk-number"]
    const totalChunks = +headers["uploader-chunks-total"]

    // Shared state between the async fs callbacks and the returned probe.
    let error
    let assembleChunksPromise
    let finished = false
    let writeStream

    // Streams the chunk to disk; on close of the final chunk, prepares the
    // assembler for the caller to invoke later.
    const writeFile = () => {
        writeStream = fs.createWriteStream(chunkPath)

        writeStream.on("error", (err) => {
            error = err
            // drain the request so busboy can finish despite the failure
            fileStream.resume()
        })

        writeStream.on("close", () => {
            finished = true

            // if all is uploaded
            if (chunkCount === totalChunks - 1) {
                assembleChunksPromise = createAssembleChunksPromise({
                    tmpDir,
                    headers,
                    useDate,
                })
            }
        })

        fileStream.pipe(writeStream)
    }

    // make sure chunk is in range
    if (chunkCount < 0 || chunkCount >= totalChunks) {
        error = new Error("Chunk is out of range")
        fileStream.resume()
    }

    else if (chunkCount === 0) {
        // create file upload dir if it's first chunk
        mkdirIfDoesntExist(chunksPath, (err) => {
            if (err) {
                error = err
                fileStream.resume()
            }

            else writeFile()
        })
    }

    else {
        // make sure dir exists if it's not first chunk
        fs.stat(dirPath, (err) => {
            if (err) {
                error = new Error("Upload has expired")
                fileStream.resume()
            }

            else writeFile()
        })
    }

    // Status probe handed back to the caller.
    // NOTE(review): if neither `finished` nor `error` is set yet and the
    // mkdir/stat callback has not fired, `writeStream` may still be undefined
    // here and this would throw — verify callers only invoke it from busboy's
    // "close" handler, after the streams settle.
    return (callback) => {
        if (finished && !error) callback(null, assembleChunksPromise)
        else if (error) callback(error)
        else {
            writeStream.on("error", callback)
            writeStream.on("close", () => callback(null, assembleChunksPromise))
        }
    }
}
/**
 * Handles one chunk-upload HTTP request.
 *
 * Validates the uploader headers and total-size budget, streams the single
 * file part to disk via handleFile, and resolves with the (possibly
 * undefined) assembler function once the request body has been consumed.
 *
 * @param {http.IncomingMessage} req - request carrying uploader-* headers
 * @param {string} tmpDir - root directory for chunk storage
 * @param {number} maxFileSize - maximum assembled size
 * @param {number} maxChunkSize - per-chunk limit, in MB (busboy gets bytes)
 * @returns {Promise<Function|undefined>} assembler for the final chunk
 */
export function uploadFile(req, tmpDir, maxFileSize, maxChunkSize) {
    return new Promise((resolve, reject) => {
        if (!checkHeaders(req.headers)) {
            reject(new Error("Missing header(s)"))
            return
        }

        // FIX: the previous call omitted `maxChunkSize`, so checkTotalSize
        // compared `maxChunkSize * undefined` (NaN) and NEVER rejected
        // oversized uploads. Pass all three arguments.
        if (!checkTotalSize(maxFileSize, maxChunkSize, req.headers["uploader-chunks-total"])) {
            reject(new Error("File is above size limit"))
            return
        }

        try {
            let limitReached = false
            let getFileStatus

            // maxChunkSize is expressed in MB; busboy expects bytes.
            const busboy = Busboy({ headers: req.headers, limits: { files: 1, fileSize: maxChunkSize * 1000 * 1000 } })

            busboy.on("file", (fieldname, fileStream) => {
                fileStream.on("limit", () => {
                    limitReached = true
                    fileStream.resume()
                })

                getFileStatus = handleFile(tmpDir, req.headers, fileStream)
            })

            busboy.on("close", () => {
                if (limitReached) {
                    reject(new Error("Chunk is above size limit"))
                    return
                }

                // NOTE(review): if the request carried no file part,
                // getFileStatus is undefined here and this throws — verify
                // upstream always sends exactly one file field.
                getFileStatus((fileErr, assembleChunksF) => {
                    if (fileErr) reject(fileErr)
                    else resolve(assembleChunksF)
                })
            })

            req.pipe(busboy)
        }
        catch (err) {
            reject(err)
        }
    })
}

export default uploadFile

View File

@ -1,60 +0,0 @@
import { createClient } from "redis"
import { createAdapter } from "@socket.io/redis-adapter"
// support for auth
// Builds the redis connection URL from REDIS_HOST/REDIS_PORT env vars,
// defaulting to localhost with no explicit port.
function composeURL() {
    const host = process.env.REDIS_HOST ?? "localhost"
    const portSuffix = process.env.REDIS_PORT ? `:${process.env.REDIS_PORT}` : ""

    return `redis://${host}${portSuffix}`
}
/**
 * Builds a configured redis client. With `withWsAdapter`, also creates a
 * pub/sub duplicate plus a socket.io redis adapter (exposed both on the
 * client and on global.ioAdapter, as before).
 */
export default ({
    withWsAdapter = false
} = {}) => {
    const clientOpts = {
        url: composeURL(),
    }

    if (!process.env.REDIS_NO_AUTH) {
        const { REDIS_PASSWORD, REDIS_USERNAME } = process.env

        if (REDIS_PASSWORD) {
            clientOpts.password = REDIS_PASSWORD
        }

        if (REDIS_USERNAME) {
            clientOpts.username = REDIS_USERNAME
        }
    }

    const client = createClient(clientOpts)

    if (withWsAdapter) {
        client.subClient = client.duplicate()
        client.ioAdapter = global.ioAdapter = createAdapter(client, client.subClient)
    }

    client.initialize = async () => {
        console.log("🔌 Connecting to Redis client...")

        await client.connect()

        return client
    }

    // handle when client disconnects unexpectedly to avoid main crash
    client.on("error", (error) => {
        console.error("❌ Redis client error:", error)
    })

    // handle when client connects
    client.on("connect", () => {
        console.log("✅ Redis client connected.", process.env.REDIS_HOST)
    })

    return client
}

View File

@ -1,134 +0,0 @@
import { SyncEntry } from "@shared-classes/DbModels"
import crypto from "crypto"
/**
 * Per-user encrypted key/value store backed by the SyncEntry mongoose model.
 *
 * Values are AES-256-CBC encrypted with the key from
 * process.env.SYNC_ENCRIPT_SECRET (hex string; assumes it decodes to the
 * 32 bytes aes-256 requires — TODO confirm with deployment config) and
 * persisted as "<iv hex>:<ciphertext hex>".
 */
export default class SecureSyncEntry {
    // NOTE: the "encrytion" misspelling is part of the public API; keep it.
    static get encrytionAlgorithm() {
        return "aes-256-cbc"
    }

    /**
     * Encrypts `value` and upserts the entry for (user_id, key).
     * @returns the saved SyncEntry document
     * @throws {Error} when any argument is missing/falsy
     */
    static async set(user_id, key, value) {
        if (!user_id) {
            throw new Error("Missing user_id")
        }
        if (!key) {
            throw new Error("Missing key")
        }
        if (!value) {
            throw new Error("Missing value")
        }

        // Look up an existing entry so set() behaves as an upsert.
        let entry = await SyncEntry.findOne({
            user_id,
            key,
        }).catch(() => null)

        const encryptionKey = Buffer.from(process.env.SYNC_ENCRIPT_SECRET, "hex")
        // Fresh random IV per write; stored alongside the ciphertext.
        const iv = crypto.randomBytes(16)

        const cipher = crypto.createCipheriv(SecureSyncEntry.encrytionAlgorithm, encryptionKey, iv)

        let encrypted

        // NOTE(review): if cipher.update throws, the error is only logged and
        // `encrypted` stays undefined, so Buffer.concat below throws a
        // TypeError anyway — confirm whether this swallow is intentional.
        try {
            encrypted = cipher.update(value)
        }
        catch (error) {
            console.error(error)
        }

        encrypted = Buffer.concat([encrypted, cipher.final()])

        if (entry) {
            entry.value = iv.toString("hex") + ":" + encrypted.toString("hex")

            await entry.save()

            return entry
        }

        entry = new SyncEntry({
            user_id,
            key,
            value: iv.toString("hex") + ":" + encrypted.toString("hex"),
        })

        await entry.save()

        return entry
    }

    /**
     * Decrypts and returns the stored value for (user_id, key),
     * or null when no entry exists.
     * @throws {Error} when user_id or key is missing
     */
    static async get(user_id, key) {
        if (!user_id) {
            throw new Error("Missing user_id")
        }
        if (!key) {
            throw new Error("Missing key")
        }

        const entry = await SyncEntry.findOne({
            user_id,
            key,
        }).catch(() => null)

        if (!entry) {
            return null
        }

        const encryptionKey = Buffer.from(process.env.SYNC_ENCRIPT_SECRET, "hex")

        // Stored format is "<iv hex>:<ciphertext hex>".
        const iv = Buffer.from(entry.value.split(":")[0], "hex")
        const encryptedText = Buffer.from(entry.value.split(":")[1], "hex")

        const decipher = crypto.createDecipheriv(SecureSyncEntry.encrytionAlgorithm, encryptionKey, iv)

        let decrypted = decipher.update(encryptedText)

        decrypted = Buffer.concat([decrypted, decipher.final()])

        return decrypted.toString()
    }

    /**
     * Deletes the entry for (user_id, key). Returns the removed document,
     * or null when nothing matched.
     * @throws {Error} when user_id or key is missing
     */
    static async delete(user_id, key) {
        if (!user_id) {
            throw new Error("Missing user_id")
        }
        if (!key) {
            throw new Error("Missing key")
        }

        const entry = await SyncEntry.findOne({
            user_id,
            key,
        }).catch(() => null)

        if (!entry) {
            return null
        }

        // NOTE(review): Document#delete was removed in mongoose 7 (use
        // deleteOne) — verify against the project's mongoose version.
        await entry.delete()

        return entry
    }

    /**
     * @returns {Promise<boolean>} true when an entry exists for (user_id, key)
     * @throws {Error} when user_id or key is missing
     */
    static async has(user_id, key) {
        if (!user_id) {
            throw new Error("Missing user_id")
        }
        if (!key) {
            throw new Error("Missing key")
        }

        const entry = await SyncEntry.findOne({
            user_id,
            key,
        }).catch(() => null)

        return !!entry
    }
}

View File

@ -1,97 +0,0 @@
const Minio = require("minio")
import path from "path"
/**
 * Builds the default public-read S3 bucket policy: anyone may GET objects
 * inside `bucketName`.
 * @throws {Error} when bucketName is missing
 */
export const generateDefaultBucketPolicy = (payload) => {
    const { bucketName } = payload

    if (!bucketName) {
        throw new Error("bucketName is required")
    }

    const publicReadStatement = {
        Action: [
            "s3:GetObject"
        ],
        Effect: "Allow",
        Principal: {
            AWS: [
                "*"
            ]
        },
        Resource: [
            `arn:aws:s3:::${bucketName}/*`
        ],
        Sid: ""
    }

    return {
        Version: "2012-10-17",
        Statement: [publicReadStatement],
    }
}
/**
 * Minio client bound to a default bucket/region, with helpers to build
 * public object URLs and to bootstrap the bucket + its public-read policy.
 */
export class StorageClient extends Minio.Client {
    constructor(options) {
        super(options)

        this.defaultBucket = String(options.defaultBucket)
        this.defaultRegion = String(options.defaultRegion)
    }

    // Public URL for an object key inside the default bucket.
    composeRemoteURL = (key) => {
        const _path = path.join(this.defaultBucket, key)

        return `${this.protocol}//${this.host}:${this.port}/${_path}`
    }

    setDefaultBucketPolicy = async (bucketName) => {
        const policy = generateDefaultBucketPolicy({ bucketName })

        return this.setBucketPolicy(bucketName, JSON.stringify(policy))
    }

    /**
     * Ensures the default bucket exists and carries the default policy.
     */
    initialize = async () => {
        console.log("🔌 Checking if storage client have default bucket...")

        // check connection with s3
        const bucketExists = await this.bucketExists(this.defaultBucket).catch(() => {
            return false
        })

        if (!bucketExists) {
            console.warn("🪣 Default bucket not exists! Creating new bucket...")

            // FIX: pass the configured region instead of the literal "s3",
            // which is not a valid region name for makeBucket.
            await this.makeBucket(this.defaultBucket, this.defaultRegion)

            // set default bucket policy
            await this.setDefaultBucketPolicy(this.defaultBucket)
        }

        // check if default bucket policy exists
        const bucketPolicy = await this.getBucketPolicy(this.defaultBucket).catch(() => {
            return null
        })

        if (!bucketPolicy) {
            // set default bucket policy
            await this.setDefaultBucketPolicy(this.defaultBucket)
        }

        console.log("✅ Storage client is ready.")
    }
}
/**
 * Builds a StorageClient from S3_* environment variables; `options`
 * override any env-derived field.
 * NOTE(review): `toBoolean` is not imported here — presumably a global
 * helper registered elsewhere in the app; verify before reuse.
 */
export const createStorageClientInstance = (options) => {
    const envConfig = {
        endPoint: process.env.S3_ENDPOINT,
        port: Number(process.env.S3_PORT),
        useSSL: toBoolean(process.env.S3_USE_SSL),
        accessKey: process.env.S3_ACCESS_KEY,
        secretKey: process.env.S3_SECRET_KEY,
        defaultBucket: process.env.S3_BUCKET,
        defaultRegion: process.env.S3_REGION,
    }

    return new StorageClient({ ...envConfig, ...options })
}

export default createStorageClientInstance

View File

@ -1,480 +0,0 @@
import axios from "axios"
import qs from "qs"
// OAuth client credentials for the TIDAL API (device-code flow).
const TIDAL_CLIENT_ID = process.env.TIDAL_CLIENT_ID
const TIDAL_CLIENT_SECRET = process.env.TIDAL_CLIENT_SECRET
/**
 * Normalizes a raw TIDAL track object into the app's track shape.
 * Mutates and returns `data` (callers rely on the in-place update):
 * adds _id, cover URL, joined artist string, metadata, and service tag.
 */
function tranformTrackData(data = {}) {
    // TODO: Support Track remixes & versions
    const joinedArtists = data.artists.map((artist) => artist.name).join(", ")
    const coverUID = data.album.cover.replace(/-/g, "/")

    data._id = data.id
    data.cover = `https://resources.tidal.com/images/${coverUID}/1080x1080.jpg`
    data.artist = joinedArtists
    data.metadata = {
        title: data.title,
        artists: joinedArtists,
        artist: joinedArtists,
        album: data.album.title,
        duration: data.duration,
    }
    data.service = "tidal"

    return data
}
export default class TidalAPI {
static API_V1 = "https://api.tidal.com/v1"
static API_V2 = "https://api.tidal.com/v2"
static API_USERS = "https://api.tidal.com/v1/users"
static async checkAuthStatus(device_code) {
const data = {
client_id: TIDAL_CLIENT_ID,
device_code: device_code,
grant_type: "urn:ietf:params:oauth:grant-type:device_code",
scope: "r_usr+w_usr+w_sub",
}
const response = await axios({
method: "POST",
url: "https://auth.tidal.com/v1/oauth2/token",
params: data,
auth: {
username: TIDAL_CLIENT_ID,
password: TIDAL_CLIENT_SECRET,
},
}).catch(err => {
return false
})
if (!response) {
return false
}
return response.data
}
static async getAuthUrl() {
let data = {
client_id: TIDAL_CLIENT_ID,
scope: "r_usr+w_usr+w_sub",
}
const response = await axios({
method: "POST",
url: "https://auth.tidal.com/v1/oauth2/device_authorization",
params: data,
})
return {
url: "https://" + response.data.verificationUri + "/" + response.data.userCode,
device_code: response.data.deviceCode,
expires_in: response.data.expiresIn,
}
}
static async getUserData({ access_token, user_id, country }) {
const url = `https://api.tidal.com/v1/users/${user_id}?countryCode=${country}`
const response = await axios({
method: "GET",
url,
headers: {
Origin: "http://listen.tidal.com",
Authorization: `Bearer ${access_token}`,
},
})
return response.data
}
static async getTrackPlaybackUrl({ track_id, quality, access_token, country }) {
let params = {
countryCode: country ?? "US",
audioquality: quality ?? "LOSSLESS",
playbackmode: "STREAM",
assetpresentation: "FULL",
}
let response = await axios({
method: "GET",
url: `https://api.tidal.com/v1/tracks/${track_id}/playbackinfopostpaywall`,
params: params,
headers: {
Origin: "http://listen.tidal.com",
Authorization: `Bearer ${access_token}`,
},
})
let decodedManifest = JSON.parse(global.b64Decode(response.data.manifest))
decodedManifest.url = decodedManifest.urls[0]
return {
metadata: {
trackId: track_id,
audioMode: response.data.audioMode,
audioQuality: response.data.audioQuality,
bitDepth: response.data.bitDepth,
bitRate: response.data.bitRate,
mimeType: response.data.manifestMimeType,
},
...decodedManifest,
}
}
static async getTrackMetadata({ track_id, access_token, country }) {
const response = await axios({
method: "GET",
url: `https://api.tidal.com/v1/tracks/${track_id}`,
params: {
countryCode: country,
},
headers: {
Origin: "http://listen.tidal.com",
Authorization: `Bearer ${access_token}`,
},
})
return response.data
}
static async getTrackManifest({ track_id, quality, access_token, country }) {
const playback = await TidalAPI.getTrackPlaybackUrl({
track_id,
quality,
access_token,
country,
})
const metadata = await TidalAPI.getTrackMetadata({
track_id,
access_token,
country,
})
return {
playback,
metadata,
}
}
static async search({ query, type = "all" }) {
let url = `https://api.tidal.com/v1/search`
switch (type) {
case "all":
url = `https://api.tidal.com/v1/search`
break
case "playlists":
url = `https://api.tidal.com/v1/search/playlists`
break
case "artists":
url = `https://api.tidal.com/v1/search/artists`
break
case "albums":
url = `https://api.tidal.com/v1/search/albums`
break
case "tracks":
url = `https://api.tidal.com/v1/search/tracks`
break
}
const response = await axios({
method: "GET",
url: url,
params: {
query: query,
countryCode: "AZ",
},
headers: {
Origin: "http://listen.tidal.com",
"x-tidal-token": TIDAL_CLIENT_ID,
},
})
return response.data.tracks.items.map(item => {
item = tranformTrackData(item)
return item
})
}
/**
* Retrieves favorite tracks for a user.
*
* @param {Object} options - The options for retrieving favorite tracks.
* @param {number} options.user_id - The user ID.
* @param {string} options.country - The country code.
* @param {string} options.access_token - The access token.
* @param {number} [options.limit=100] - The maximum number of tracks to retrieve.
* @param {number} [options.offset=0] - The offset for pagination.
* @return {Object} The response object containing the total length and tracks.
*/
static async getFavoriteTracks({
user_id,
access_token,
country,
limit = 100,
offset = 0,
order = "DATE",
orderDirection = "DESC",
}) {
const response = await axios({
url: `${TidalAPI.API_USERS}/${user_id}/favorites/tracks`,
method: "GET",
headers: {
Origin: "http://listen.tidal.com",
Authorization: `Bearer ${access_token}`,
},
params: {
countryCode: country,
order,
orderDirection,
limit,
offset,
},
})
response.data.items = response.data.items.map(item => {
item.item = tranformTrackData(item.item)
item.item.liked_at = new Date(item.created).getTime()
item.item.liked = true
item.item._computed = true
return item.item
})
return {
total_length: response.data.totalNumberOfItems,
tracks: response.data.items,
}
}
/**
* Retrieves self favorite playlists based on specified parameters.
*
* @param {Object} options - The options object.
* @param {string} options.country - The country code.
* @param {string} options.access_token - The access token for authentication.
* @param {number} [options.limit=100] - The maximum number of playlists to retrieve.
* @param {number} [options.offset=0] - The offset for pagination.
* @param {string} [options.order="DATE"] - The field to order the playlists by.
* @param {string} [options.orderDirection="DESC"] - The direction to order the playlists in.
* @return {Object} - An object containing the total length and items of the playlists.
*/
static async getFavoritePlaylists({
country,
access_token,
limit = 100,
offset = 0,
order = "DATE",
orderDirection = "DESC",
}) {
const params = {
folderId: "root",
deviceType: "BROWSER",
countryCode: country,
offset,
limit,
order,
orderDirection,
}
let response = await axios({
url: `${TidalAPI.API_V2}/my-collection/playlists/folders`,
method: "GET",
headers: {
Origin: "http://listen.tidal.com",
Authorization: `Bearer ${access_token}`,
Server: "envoy",
},
params: params,
})
response.data.items = response.data.items.map(item => {
item.data._id = item.data.uuid
item.data.addedAt = item.addedAt
item.data.created_at = item.addedAt
item.data.service = "tidal"
const coverUID = item.data.squareImage.replace(/-/g, "/")
item.data.cover = `https://resources.tidal.com/images/${coverUID}/1080x1080.jpg`
return item.data
})
return {
total_length: response.data.totalNumberOfItems,
items: response.data.items,
}
}
/**
* Retrieves playlist items based on the provided parameters.
*
* @param {Object} options - The options for retrieving playlist items.
* @param {string} options.uuid - The UUID of the playlist.
* @param {number} options.limit - The maximum number of items to retrieve.
* @param {number} options.offset - The offset of items to start retrieving from.
* @param {string} options.country - The country code for retrieving items.
* @param {string} options.access_token - The access token for authentication.
* @return {Object} An object containing the total length and items of the playlist.
*/
static async getPlaylistItems({
uuid,
limit,
offset,
country,
access_token,
}) {
const params = {
limit,
offset,
countryCode: country,
}
let response = await axios({
url: `${TidalAPI.API_V1}/playlists/${uuid}/items`,
method: "GET",
headers: {
Origin: "http://listen.tidal.com",
Authorization: `Bearer ${access_token}`,
Server: "envoy",
},
params: params,
})
response.data.items = response.data.items.map((item) => {
item = tranformTrackData(item.item)
return item
})
return {
total_length: response.data.totalNumberOfItems,
list: response.data.items,
}
}
/**
* Retrieves playlist data from the Tidal API.
*
* @param {Object} options - The options for retrieving the playlist data.
* @param {string} options.uuid - The UUID of the playlist.
* @param {string} options.access_token - The access token for authentication.
* @param {string} options.country - The country code for the playlist data.
* @param {boolean} [options.resolve_items=false] - Whether to resolve the playlist items.
* @param {number} [options.limit] - The maximum number of items to retrieve.
* @param {number} [options.offset] - The offset for pagination.
* @return {Object} The playlist data retrieved from the Tidal API.
*/
static async getPlaylistData({
uuid,
access_token,
country,
resolve_items = false,
limit,
offset,
}) {
const params = {
countryCode: country,
}
let response = await axios({
url: `${TidalAPI.API_V1}/playlists/${uuid}`,
method: "GET",
headers: {
Origin: "http://listen.tidal.com",
Authorization: `Bearer ${access_token}`,
Server: "envoy",
},
params: params,
})
const coverUID = response.data.squareImage.replace(/-/g, "/")
response.data.cover = `https://resources.tidal.com/images/${coverUID}/1080x1080.jpg`
response.data.service = "tidal"
if (resolve_items) {
response.data.list = await TidalAPI.getPlaylistItems({
uuid,
limit,
offset,
access_token,
country,
})
response.data.total_length = response.data.list.total_length
response.data.list = response.data.list.list
}
return response.data
}
/**
* Toggles the like status of a track.
*
* @param {Object} params - The parameters for toggling the track like.
* @param {string} params.trackId - The ID of the track to toggle the like status.
* @param {boolean} params.to - The new like status. True to like the track, false to unlike it.
* @param {string} params.user_id - The ID of the user performing the action.
* @param {string} params.access_token - The access token for authentication.
* @param {string} params.country - The country code.
* @return {Object} - The response data from the API.
*/
static async toggleTrackLike({
trackId,
to,
user_id,
access_token,
country,
}) {
let url = `${TidalAPI.API_V1}/users/${user_id}/favorites/tracks`
let payload = null
let headers = {
Origin: "http://listen.tidal.com",
Authorization: `Bearer ${access_token}`,
}
if (!to) {
url = `${url}/${trackId}`
} else {
payload = qs.stringify({
trackIds: trackId,
onArtifactNotFound: "FAIL"
})
headers["Content-Type"] = "application/x-www-form-urlencoded"
}
let response = await axios({
url: url,
method: to ? "POST" : "DELETE",
headers: headers,
params: {
countryCode: country,
deviceType: "BROWSER"
},
data: payload
})
return response.data
}
/**
 * Toggles the like status of a playlist.
 *
 * NOTE(review): not implemented — the body is empty, so calling this
 * resolves to `undefined`. Presumably intended to mirror
 * `toggleTrackLike` for playlists; confirm before relying on it.
 *
 * @param {string} playlist_id - The UUID of the playlist.
 */
static async togglePlaylistLike(playlist_id) {
}
}

View File

@ -1,120 +0,0 @@
// Bootstrap: load .env, compute source roots, register module aliases and
// install a few global helper functions used throughout the service.
require("dotenv").config()

const path = require("path")
const { registerBaseAliases } = require("linebridge/dist/server")
const { webcrypto: crypto } = require("crypto")
const infisical = require("infisical-node")

// Global build-mode flag; controls whether code loads from dist/ or src/.
global.isProduction = process.env.NODE_ENV === "production"

globalThis["__root"] = path.resolve(process.cwd())
globalThis["__src"] = path.resolve(globalThis["__root"], global.isProduction ? "dist" : "src")

// Module resolution aliases, resolved relative to the computed source root.
const customAliases = {
    "root": globalThis["__root"],
    "src": globalThis["__src"],
    "@shared-classes": path.resolve(globalThis["__src"], "_shared/classes"),
    "@services": path.resolve(globalThis["__src"], "services"),
}

// In development, point comty.js and shared classes at local checkouts.
if (!global.isProduction) {
    customAliases["comty.js"] = path.resolve(globalThis["__src"], "../../comty.js/src")
    customAliases["@shared-classes"] = path.resolve(globalThis["__src"], "shared-classes")
}

// NOTE(review): same override as the dev branch above; lets production
// builds opt in to linked shared classes via USE_LINKED_SHARED.
if (process.env.USE_LINKED_SHARED) {
    customAliases["@shared-classes"] = path.resolve(globalThis["__src"], "shared-classes")
}

registerBaseAliases(globalThis["__src"], customAliases)

// patches
const { Buffer } = require("buffer")

// Global base64 helpers.
global.b64Decode = (data) => {
    return Buffer.from(data, "base64").toString("utf-8")
}
global.b64Encode = (data) => {
    return Buffer.from(data, "utf-8").toString("base64")
}

// nanoid-style URL-safe random id generator (default 21 chars), backed by
// webcrypto.getRandomValues; maps each random byte into [a-z0-9A-Z_-].
global.nanoid = (t = 21) => crypto.getRandomValues(new Uint8Array(t)).reduce(((t, e) => t += (e &= 63) < 36 ? e.toString(36) : e < 62 ? (e - 26).toString(36).toUpperCase() : e > 62 ? "-" : "_"), "");
// Patch: in-place update of array elements by looking each element up as a
// key of `obj`; e.g. ["a","b"].updateFromObjectKeys({ a: 1 }) -> [1, "b"].
// Returns the same (mutated) array.
//
// FIX: defined via Object.defineProperty (non-enumerable) instead of direct
// assignment, so the method does not leak into `for...in` loops over arrays.
Object.defineProperty(Array.prototype, "updateFromObjectKeys", {
    value: function (obj) {
        this.forEach((value, index) => {
            if (obj[value] !== undefined) {
                this[index] = obj[value]
            }
        })

        return this
    },
    writable: true,
    configurable: true,
    enumerable: false,
})
// Global helper: coerce an arbitrary value to a boolean.
// Booleans pass through, strings match "true" case-insensitively,
// everything else (numbers, null, objects, ...) maps to false.
global.toBoolean = (value) => {
    switch (typeof value) {
        case "boolean":
            return value
        case "string":
            return value.toLowerCase() === "true"
        default:
            return false
    }
}
/**
 * Fetches secrets from Infisical and injects them into process.env.
 * Already-set environment variables are never overwritten.
 * Requires process.env.INFISICAL_TOKEN to authenticate the client.
 */
async function injectEnvFromInfisical() {
    // BUGFIX: `a ?? b ? "prod" : "dev"` parses as `(a ?? b) ? "prod" : "dev"`,
    // so any truthy FORCE_ENV (even "dev") collapsed to "prod". Parenthesize
    // the ternary so FORCE_ENV, when set, is used verbatim.
    const envMode = global.FORCE_ENV ?? (global.isProduction ? "prod" : "dev")

    console.log(`🔑 Injecting env variables from INFISICAL in [${envMode}] mode...`)

    const client = new infisical({
        token: process.env.INFISICAL_TOKEN,
    })

    const secrets = await client.getAllSecrets({
        environment: envMode,
        attachToProcessEnv: false,
    })

    // inject to process.env
    secrets.forEach((secret) => {
        if (!(process.env[secret.secretName])) {
            process.env[secret.secretName] = secret.secretValue
        }
    })
}
// Gracefully shuts down the API instance's HTTP server (when one exists and
// exposes close()) and terminates the process with the given exit code.
function handleExit(instance, code) {
    const server = instance.server

    if (server && typeof server.close === "function") {
        server.close()
    }

    return process.exit(code)
}
/**
 * Boots the service: optionally pulls secrets from Infisical, creates the
 * API instance, wires process shutdown handlers, and initializes it.
 *
 * @param {Object} [options] - Boot options.
 * @param {boolean} [options.force_infisical] - Inject secrets even when
 *   INFISICAL_TOKEN is not set in the environment.
 * @returns {Promise<Object>} The initialized API instance.
 */
async function main({ force_infisical } = {}) {
    const API = require(path.resolve(globalThis["__src"], "api.js")).default

    if (force_infisical || process.env.INFISICAL_TOKEN) {
        await injectEnvFromInfisical()
    }

    const instance = new API()

    // Clean shutdown (code 0) on normal exit/interrupt; code 1 on crashes.
    for (const event of ["exit", "SIGINT"]) {
        process.on(event, () => handleExit(instance, 0))
    }
    for (const event of ["uncaughtException", "unhandledRejection"]) {
        process.on(event, () => handleExit(instance, 1))
    }

    await instance.initialize()

    return instance
}
// Entry point: boot the service. On a fatal startup error, log it and mark
// the process as failed (non-zero exit) so supervisors/orchestrators notice
// and can restart it — previously the process still exited with code 0.
main().catch((error) => {
    console.error(`🆘 [FATAL ERROR] >`, error)
    process.exitCode = 1
})