support for rtengineng

This commit is contained in:
SrGooglo 2025-03-25 23:04:38 +00:00
parent 8cd34fafcb
commit 8ac78cbf5f
19 changed files with 636 additions and 347 deletions

View File

@ -0,0 +1,152 @@
import HyperExpress from "hyper-express"
import { nanoid } from "nanoid"

class RTEngineNG {
    constructor(config = {}) {
        this.events = new Map()

        if (typeof config.events === "object") {
            for (const [event, handler] of Object.entries(config.events)) {
                this.events.set(event, handler)
            }
        }

        this.onUpgrade = config.onUpgrade || null
        this.onConnection = config.onConnection || null
        this.onDisconnection = config.onDisconnection || null
    }

    clients = new Set()
    router = new HyperExpress.Router()

    // senders shared with every event handler; broadcast fans out to all connected clients
    senders = {
        broadcast: async (event, data) => {
            for (const client of this.clients) {
                this.sendMessage(client, event, data)
            }
        },
    }

    sendMessage = (socket, event, data) => {
        const payload = JSON.stringify({ event, data })

        socket.send(payload)
    }

    sendToTopic = (socket, topic, event, data, self = false) => {
        const payload = JSON.stringify({
            topic,
            event,
            data,
        })

        socket.publish(topic, payload)

        if (self === true) {
            this.sendMessage(socket, event, data)
        }
    }

    sendError = (socket, error) => {
        if (error instanceof Error) {
            error = error.toString()
        }

        this.sendMessage(socket, "error", error)
    }

    // parses an incoming `{ event, data }` payload and dispatches it to the registered handler
    handleMessage = async (socket, payload) => {
        try {
            const message = JSON.parse(payload)

            if (typeof message.event !== "string") {
                return this.sendError(socket, "Invalid event type")
            }

            const handler = this.events.get(message.event)

            if (typeof handler === "function") {
                const handlerSenders = {
                    ...this.senders,
                    toTopic: (room, event, data, self) => {
                        this.sendToTopic(socket, room, event, data, self)
                    },
                    send: (event, data) => {
                        this.sendMessage(socket, event, data)
                    },
                    error: (error) => {
                        this.sendError(socket, error)
                    },
                }

                await handler(socket, message.data, handlerSenders)
            } else {
                this.sendError(socket, "Event handler not found")
            }
        } catch (error) {
            this.sendError(socket, error)
        }
    }

    handleConnection = async (socket) => {
        socket.on("close", () => this.handleDisconnection(socket))
        socket.on("message", (payload) => this.handleMessage(socket, payload))

        if (this.onConnection) {
            await this.onConnection(socket)
        }

        this.clients.add(socket)
    }

    handleDisconnection = async (socket) => {
        if (this.onDisconnection) {
            await this.onDisconnection(socket)
        }

        this.clients.delete(socket)
    }

    // HTTP -> WS upgrade: builds a connection context and, if provided, lets onUpgrade enrich or reject it
    handleUpgrade = async (req, res) => {
        try {
            const context = {
                id: nanoid(),
                token: req.query.token,
                user: null,
                httpHeaders: req.headers,
            }

            if (typeof this.onUpgrade === "function") {
                res.upgrade(await this.onUpgrade(context, req.query.token))
            } else {
                res.upgrade(context)
            }
        } catch (error) {
            console.error("Error upgrading connection:", error)
            res.status(401).end()
        }
    }

    registerEvent = (event, handler) => {
        this.events.set(event, handler)
    }

    registerEvents = (obj) => {
        for (const [event, handler] of Object.entries(obj)) {
            this.registerEvent(event, handler)
        }
    }

    // mounts the websocket route and upgrade handler on the engine's hyper-express app
    initialize = async (engine) => {
        this.engine = engine

        this.router.ws("/", this.handleConnection)
        this.router.upgrade("/", this.handleUpgrade)

        this.engine.app.use("/", this.router)

        console.log(`✅ RTEngineNG initialized with ${this.events.size} events`)
    }
}
export default RTEngineNG
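
For context, a minimal usage sketch of the class above, assuming a hyper-express server wrapped as engine.app; the event name, the auth logic inside onUpgrade, and the port are illustrative only and not part of this commit.

import HyperExpress from "hyper-express"
import RTEngineNG from "./rtengineng" // hypothetical path to the class above

const app = new HyperExpress.Server()

const rt = new RTEngineNG({
    events: {
        // each handler receives (socket, data, senders)
        ping: async (socket, data, { send }) => {
            send("pong", { time: Date.now() })
        },
    },
    onUpgrade: async (context, token) => {
        // resolve and attach the user here; whatever is returned
        // becomes the context passed to res.upgrade()
        return context
    },
})

// initialize() mounts the websocket and upgrade routes on the engine's hyper-express app
await rt.initialize({ app })

await app.listen(3001)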

View File

@ -3,13 +3,11 @@ import { Queue, Worker } from "bullmq"
import { composeURL as composeRedisConnectionString } from "@shared-classes/RedisClient"

export default class TaskQueueManager {
-   constructor(params, ctx) {
+   constructor(params) {
        if (!params) {
            throw new Error("Missing params")
        }

-       this.ctx = ctx
        this.params = params
        this.queues = {}
        this.workers = {}
@ -36,17 +34,15 @@ export default class TaskQueueManager {
    }

    registerQueue = (queueObj, options) => {
-       const connection = this.ctx.engine.ws.redis

        const queue = new Queue(queueObj.id, {
-           connection,
+           connection: options.redisOptions,
            defaultJobOptions: {
                removeOnComplete: true,
            },
        })

        const worker = new Worker(queueObj.id, queueObj.process, {
-           connection,
+           connection: options.redisOptions,
            concurrency: queueObj.maxJobs ?? 1,
        })
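
With this change the queue manager no longer reaches into the websocket engine for its Redis connection; the caller passes it in. A sketch of the new calling convention, mirroring the posts service below (the redisClient variable is an assumption standing in for an ioredis instance):

const queuesManager = new TaskQueueManager({
    workersPath: `${__dirname}/queues`,
})

// redisOptions is forwarded to BullMQ as the `connection` for both the Queue and the Worker
await queuesManager.initialize({
    redisOptions: redisClient.options,
})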

View File

@ -187,10 +187,20 @@ export default class Gateway {
        }

        if (msg.type === "router:ws:register") {
+           let target = `http://${this.state.internalIp}:${msg.data.listen_port ?? msg.data.listen?.port}`
+
+           if (!msg.data.ws_path && msg.data.namespace) {
+               target += `/${msg.data.namespace}`
+           }
+
+           if (msg.data.ws_path && msg.data.ws_path !== "/") {
+               target += `/${msg.data.ws_path}`
+           }
+
            await this.proxy.register({
                serviceId: id,
                path: `/${msg.data.namespace}`,
-               target: `http://${this.state.internalIp}:${msg.data.listen.port}/${msg.data.namespace}`,
+               target: target,
                pathRewrite: {
                    [`^/${msg.data.namespace}`]: "",
                },
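
In effect, the websocket target is now built from either listen_port or listen.port, plus the namespace or an explicit ws_path. A quick illustration with hypothetical values (internal IP 10.0.0.5):

// { listen_port: 3001, namespace: "posts" }                 -> http://10.0.0.5:3001/posts
// { listen_port: 3001, namespace: "chats", ws_path: "rt" }  -> http://10.0.0.5:3001/rt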

View File

@ -18,9 +18,9 @@ function getHttpServerEngine(extraOptions = {}, handler = () => { }) {
        {
            key: fs.readFileSync(sslKey),
            cert: fs.readFileSync(sslCert),
-           ...extraOptions
+           ...extraOptions,
        },
-       handler
+       handler,
    )
} else {
    return http.createServer(extraOptions, handler)
@ -74,7 +74,9 @@ export default class Proxy {
        }

        if (ws) {
-           console.log(`🔗 Registering websocket proxy [${path}] -> [${target}]`)
+           console.log(
+               `🔗 Registering websocket proxy [${path}] -> [${target}]`,
+           )
            this.wsProxys.set(path, proxyObj)
        } else {
            console.log(`🔗 Registering path proxy [${path}] -> [${target}]`)
@ -141,7 +143,10 @@ export default class Proxy {
    setCorsHeaders = (res) => {
        res.setHeader("Access-Control-Allow-Origin", "*")
-       res.setHeader("Access-Control-Allow-Methods", "GET,HEAD,PUT,PATCH,POST,DELETE")
+       res.setHeader(
+           "Access-Control-Allow-Methods",
+           "GET,HEAD,PUT,PATCH,POST,DELETE",
+       )
        res.setHeader("Access-Control-Allow-Headers", "*")

        return res
@ -160,11 +165,13 @@ export default class Proxy {
        }

        if (sanitizedUrl === "/") {
-           return res.end(JSON.stringify({
+           return res.end(
+               JSON.stringify({
                    name: pkg.name,
                    version: pkg.version,
-                   lb_version: defaults.version
-           }))
+                   lb_version: defaults.version,
+               }),
+           )
        }

        const namespace = `/${sanitizedUrl.split("/")[1]}`
@ -173,11 +180,14 @@ export default class Proxy {
        if (!route) {
            res.statusCode = 404
-           res.end(JSON.stringify({
+           res.end(
+               JSON.stringify({
                    error: "Gateway route not found",
-                   details: "The gateway route you are trying to access does not exist, maybe the service is down...",
+                   details:
+                       "The gateway route you are trying to access does not exist, maybe the service is down...",
                    namespace: namespace,
-           }))
+               }),
+           )

            return null
        }
@ -190,7 +200,7 @@ export default class Proxy {
    }

    handleHttpUpgrade = (req, socket, head) => {
-       const namespace = `/${req.url.split("/")[1]}`
+       const namespace = `/${req.url.split("/")[1].split("?")[0]}`

        const route = this.wsProxys.get(namespace)

        if (!route) {
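
The added split("?")[0] matters for upgrade requests that carry a query string; a small illustration with a hypothetical URL:

const url = "/posts?token=abc"

const before = `/${url.split("/")[1]}`               // "/posts?token=abc", never matches a registered namespace
const after = `/${url.split("/")[1].split("?")[0]}`  // "/posts", matches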

View File

@ -20,10 +20,10 @@ export default async (obj, token) => {
    userData._id = userData._id.toString()

-   // inject to obj
    obj.user = userData
    obj.token = token
    obj.session = validation.data
+   obj.user = userData

    return obj
}

View File

@ -1,6 +1,6 @@
{
    "name": "@comty/server",
-   "version": "1.27.3@alpha",
+   "version": "1.28.0@alpha",
    "license": "ComtyLicense",
    "private": true,
    "workspaces": [
@ -12,7 +12,6 @@
        "build:bin": "cd build && pkg ./index.js"
    },
    "dependencies": {
-       "@gullerya/object-observer": "^6.1.3",
        "@infisical/sdk": "^2.1.8",
        "@opentelemetry/api": "^1.9.0",
        "@opentelemetry/auto-instrumentations-node": "^0.56.1",
@ -20,16 +19,11 @@
        "@sentry/node": "^9.4.0",
        "axios": "^1.7.4",
        "bcrypt": "^5.1.1",
-       "bull": "^4.16.5",
        "bullmq": "^5.41.5",
-       "chalk": "4.1.2",
-       "comty.js": "^0.60.3",
-       "dotenv": "^16.4.4",
+       "comty.js": "^0.61.0",
        "http-proxy": "^1.18.1",
-       "hyper-express": "^6.17.2",
-       "ioredis": "^5.4.1",
        "jsonwebtoken": "^9.0.2",
-       "linebridge": "^0.22.8",
+       "linebridge": "^0.24.0",
        "minio": "^8.0.1",
        "module-alias": "^2.2.3",
        "mongoose": "^8.5.3",

View File

@ -0,0 +1,7 @@
import { Extension } from "@db_models"

export default async (req) => {
    const extensions = await Extension.find()

    return extensions
}

View File

@ -0,0 +1,86 @@
import fs from "node:fs"
import path from "node:path"
import pMap from "p-map"

export default async function b2Upload({
    source,
    remotePath,
    metadata = {},
    targetFilename,
    isDirectory,
    retryNumber = 0,
}) {
    if (isDirectory) {
        let files = await fs.promises.readdir(source)

        files = files.map((file) => {
            const filePath = path.join(source, file)
            const isTargetDirectory = fs.lstatSync(filePath).isDirectory()

            return {
                source: filePath,
                remotePath: path.join(remotePath, file),
                isDirectory: isTargetDirectory,
            }
        })

        await pMap(files, b2Upload, {
            concurrency: 5,
        })

        return {
            id: remotePath,
            url: `https://${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${remotePath}/${targetFilename}`,
            metadata: metadata,
        }
    }

    try {
        //await global.b2.authorize()

        if (!fs.existsSync(source)) {
            throw new OperationError(500, "File not found")
        }

        const uploadUrl = await global.b2.getUploadUrl({
            bucketId: process.env.B2_BUCKET_ID,
        })

        console.debug(`Uploading object to B2 Storage >`, {
            source: source,
            remote: remotePath,
        })

        const data = await fs.promises.readFile(source)

        await global.b2.uploadFile({
            uploadUrl: uploadUrl.data.uploadUrl,
            uploadAuthToken: uploadUrl.data.authorizationToken,
            fileName: remotePath,
            data: data,
            info: metadata,
        })
    } catch (error) {
        console.error(error)

        if (retryNumber < 5) {
            return await b2Upload({
                source,
                remotePath,
                metadata,
                targetFilename,
                isDirectory,
                retryNumber: retryNumber + 1,
            })
        }

        throw new OperationError(500, "B2 upload failed")
    }

    return {
        id: remotePath,
        url: `https://${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${remotePath}`,
        metadata: metadata,
    }
}
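
The helper assumes a pre-configured global.b2 client and an OperationError global provided elsewhere in this codebase. A hypothetical setup using the backblaze-b2 package, with made-up environment variable names for the credentials:

import B2 from "backblaze-b2"

global.b2 = new B2({
    applicationKeyId: process.env.B2_KEY_ID,
    applicationKey: process.env.B2_APP_KEY,
})

// must be authorized before getUploadUrl/uploadFile calls
await global.b2.authorize()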

View File

@ -13,7 +13,7 @@ class API extends Server {
    static listen_port = process.env.HTTP_LISTEN_PORT ?? 3009

    middlewares = {
-       ...SharedMiddlewares
+       ...SharedMiddlewares,
    }

    contexts = {

View File

@ -89,18 +89,16 @@ export default async (payload = {}, req) => {
    // broadcast post to all users
    if (visibility === "public") {
-       global.websocket.io
-           .to("global:posts:realtime")
-           .emit(`post.new`, result[0])
+       global.websocket.senders.toTopic("realtime:feed", "post:new", result[0])
    }

    if (visibility === "private") {
-       const userSocket = await global.websocket.find.socketByUserId(
+       const userSockets = await global.websocket.find.clientsByUserId(
            post.user_id,
        )

-       if (userSocket) {
-           userSocket.emit(`post.new`, result[0])
+       for (const userSocket of userSockets) {
+           userSocket.emit(`post:new`, result[0])
        }
    }

View File

@ -40,22 +40,22 @@ export default async (payload = {}) => {
        throw new OperationError(500, `An error has occurred: ${err.message}`)
    })

-   // broadcast post to all users
    if (post.visibility === "public") {
-       global.websocket.io
-           .to("global:posts:realtime")
-           .emit(`post.delete`, post)
-       global.websocket.io
-           .to("global:posts:realtime")
-           .emit(`post.delete.${post_id}`, post)
+       global.websocket.senders.toTopic(
+           "realtime:feed",
+           "post:delete",
+           post_id,
+       )
    }

    if (post.visibility === "private") {
-       const userSocket = await global.websocket.find.socketByUserId(
+       const userSockets = await global.websocket.find.clientsByUserId(
            post.user_id,
        )

-       if (userSocket) {
-           userSocket.emit(`post.delete`, post_id)
-           userSocket.emit(`post.delete.${post_id}`, post_id)
+       for (const userSocket of userSockets) {
+           userSocket.emit(`post:delete`, post_id)
        }
    }

View File

@ -51,12 +51,9 @@ export default async (payload = {}) => {
        count: count,
    }

-   global.websocket.io.of("/").emit(`post.${post_id}.likes.update`, eventData)
-   global.websocket.io.of("/").emit(`post.like.update`, eventData)

    return {
        post_id: post_id,
        liked: to,
-       count: count
+       count: count,
    }
}

View File

@ -37,22 +37,20 @@ export default async (post_id, update) => {
    })

    if (post.visibility === "public") {
-       global.websocket.io
-           .to("global:posts:realtime")
-           .emit(`post.update`, result[0])
-       global.websocket.io
-           .to("global:posts:realtime")
-           .emit(`post.update.${post_id}`, result[0])
+       global.websocket.senders.toTopic(
+           "realtime:feed",
+           `post:update`,
+           result[0],
+       )
    }

    if (post.visibility === "private") {
-       const userSocket = await global.websocket.find.socketByUserId(
+       const userSockets = await global.websocket.find.clientsByUserId(
            post.user_id,
        )

-       if (userSocket) {
-           userSocket.emit(`post.update`, result[0])
-           userSocket.emit(`post.update.${post_id}`, result[0])
+       for (const userSocket of userSockets) {
+           userSocket.emit(`post:update`, result[0])
        }
    }

View File

@ -1,5 +1,7 @@
-   import { VotePoll } from "@db_models"
+   import { VotePoll, Post } from "@db_models"
+   import stage from "./stage"

-   // TODO: Implement logic to handle vote poll
    export default async (payload = {}) => {
        if (!payload.user_id) {
            throw new OperationError(400, "Missing user_id")
@ -13,6 +15,14 @@ export default async (payload = {}) => {
        throw new OperationError(400, "Missing option_id")
    }

+   let post = await Post.findOne({
+       _id: payload.post_id,
+   })
+
+   if (!post) {
+       throw new OperationError(404, "Post not found")
+   }
+
    let vote = await VotePoll.findOne({
        user_id: payload.user_id,
        post_id: payload.post_id,
@ -24,7 +34,7 @@ export default async (payload = {}) => {
        previousOptionId = vote.option_id

        await VotePoll.deleteOne({
-           _id: vote._id.toString()
+           _id: vote._id.toString(),
        })
    }
@ -38,9 +48,14 @@ export default async (payload = {}) => {
    vote = vote.toObject()
-   vote.previous_option_id = previousOptionId

-   global.websocket.io.of("/").emit(`post.poll.vote`, vote)
+   post = (await stage({ posts: post, for_user_id: payload.user_id }))[0]

-   return vote
+   if (post.visibility === "public") {
+       global.websocket.senders.toTopic("realtime:feed", `post:update`, post)
+   }
+
+   return {
+       post: post,
+       vote: vote,
+   }
}

View File

@ -3,39 +3,14 @@ import { Server } from "linebridge"
import DbManager from "@shared-classes/DbManager"
import RedisClient from "@shared-classes/RedisClient"
import TaskQueueManager from "@shared-classes/TaskQueueManager"
+ import InjectedAuth from "@shared-lib/injectedAuth"
import SharedMiddlewares from "@shared-middlewares"

- // wsfast
- import HyperExpress from "hyper-express"
-
- class WSFastServer {
-     router = new HyperExpress.Router()
-
-     clients = new Set()
-
-     routes = {
-         connect: async (socket) => {
-             console.log("Client connected", socket)
-         },
-     }
-
-     async initialize(engine) {
-         this.engine = engine
-
-         Object.keys(this.routes).forEach((route) => {
-             this.router.ws(`/${route}`, this.routes[route])
-         })
-
-         this.engine.app.use(this.router)
-     }
- }
-
export default class API extends Server {
    static refName = "posts"
+   static useEngine = "hyper-express-ng"
    static enableWebsockets = true
-   static routesPath = `${__dirname}/routes`
-   static wsRoutesPath = `${__dirname}/routes_ws`
    static listen_port = process.env.HTTP_LISTEN_PORT ?? 3001
@ -43,31 +18,43 @@ export default class API extends Server {
        ...SharedMiddlewares,
    }

+   handleWsUpgrade = async (context, token, res) => {
+       context = await InjectedAuth(context, token, res)
+
+       if (!context.user) {
+           res.close(401, "Unauthorized or missing auth token")
+           return false
+       }
+
+       return res.upgrade(context)
+   }
+
+   handleWsConnection = (socket) => {
+       console.log(`[WS] @${socket.context.user.username} connected`)
+   }
+
+   handleWsDisconnect = (socket) => {
+       console.log(`[WS] @${socket.context.user.username} disconnected`)
+   }
+
    contexts = {
        db: new DbManager(),
        redis: RedisClient(),
-       ws: new WSFastServer(this.engine),
    }

-   queuesManager = new TaskQueueManager(
-       {
-           workersPath: `${__dirname}/queues`,
-       },
-       this,
-   )
+   queuesManager = new TaskQueueManager({
+       workersPath: `${__dirname}/queues`,
+   })

    async onInitialize() {
        await this.contexts.db.initialize()
        await this.contexts.redis.initialize()

        await this.queuesManager.initialize({
-           redisOptions: this.engine.ws.redis.options,
+           redisOptions: this.contexts.redis.client.options,
        })

-       await this.contexts.ws.initialize(this.engine)
-
        global.queues = this.queuesManager
    }
-
-   handleWsAuth = require("@shared-lib/handleWsAuth").default
}

Boot(API)

View File

@ -1,4 +0,0 @@
export default async function (socket) {
    console.log(`Socket ${socket.id} connected to realtime posts`)
    socket.join("global:posts:realtime")
}

View File

@ -1,4 +0,0 @@
export default async function (socket) {
    console.log(`Socket ${socket.id} disconnected from realtime posts`)
    socket.leave("global:posts:realtime")
}

View File

@ -0,0 +1,23 @@
// Convert routered functions into flat routes,
// e.g. { fn: 1, nestedfn: { test: 2, test2: 3 } } -> { fn: 1, "nestedfn:test": 2, "nestedfn:test2": 3 }
export default function flatRouteredFunctions(obj, prefix = "", acc = {}) {
    for (const key in obj) {
        if (Object.prototype.hasOwnProperty.call(obj, key)) {
            const value = obj[key]

            // Determine the new key: if there's a prefix, add it with a colon separator.
            const newKey = prefix ? `${prefix}:${key}` : key

            // If value is a non-null object (and not an array), recursively flatten it.
            if (
                value !== null &&
                typeof value === "object" &&
                !Array.isArray(value)
            ) {
                flatRouteredFunctions(value, newKey, acc)
            } else {
                acc[newKey] = value
            }
        }
    }

    return acc
}
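
A quick usage sketch of the flattener; the handler names are illustrative:

const events = flatRouteredFunctions({
    connect: onConnect,
    posts: {
        create: onPostCreate,
        delete: onPostDelete,
    },
})

// -> { connect: onConnect, "posts:create": onPostCreate, "posts:delete": onPostDelete }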

View File

@ -0,0 +1,24 @@
import fs from "node:fs/promises"
import path from "node:path"

export default async function getRouteredFunctions(dir) {
    const files = await fs.readdir(dir)
    const result = {}

    for (const file of files) {
        const filePath = path.join(dir, file)
        const stat = await fs.stat(filePath)
        const eventName = path.basename(file).split(".")[0]

        if (stat.isFile()) {
            const event = await import(filePath)
            result[eventName] = event.default
        } else if (stat.isDirectory()) {
            result[eventName] = await getRouteredFunctions(filePath)
        }
    }

    return result
}
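
Combined with the flattener above, this can turn a directory of handler files into the flat events map that RTEngineNG expects. A hedged sketch; the directory layout and import paths are assumptions, not part of this commit:

import RTEngineNG from "@classes/rtengineng" // hypothetical alias
import getRouteredFunctions from "./getRouteredFunctions"
import flatRouteredFunctions from "./flatRouteredFunctions"

// e.g. routes_ws/posts/create.js becomes the "posts:create" event
const routered = await getRouteredFunctions(`${__dirname}/routes_ws`)
const events = flatRouteredFunctions(routered)

const rt = new RTEngineNG({ events })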