Merge pull request #109 from ragestudio/new-server-arch

New server arch
srgooglo 2023-11-28 20:00:26 +01:00 committed by GitHub
commit 580f20f03f
78 changed files with 674 additions and 1293 deletions

1
.gitignore vendored
View File

@ -24,6 +24,7 @@
/**/**/.evite
/**/**/uploads
/**/**/d_data
/**/**/redis_data
/**/**/*.tar
/**/**/*.7z
/**/**/*.zip

3
.gitmodules vendored
View File

@ -1,3 +1,6 @@
[submodule "evite"]
path = evite
url = https://github.com/ragestudio/evite
[submodule "linebridge"]
path = linebridge
url = https://github.com/ragestudio/linebridge

3
.ignorepackages Normal file
View File

@ -0,0 +1,3 @@
rtengine
wrapper
comty.js

20
.vscode/settings.json vendored
View File

@ -1,25 +1,25 @@
{
"discord.enabled": false,
"workbench.colorCustomizations": {
"activityBar.activeBackground": "#65c89b",
"activityBar.background": "#65c89b",
"activityBar.activeBackground": "#ff9396",
"activityBar.background": "#ff9396",
"activityBar.foreground": "#15202b",
"activityBar.inactiveForeground": "#15202b99",
"activityBarBadge.background": "#945bc4",
"activityBarBadge.background": "#048000",
"activityBarBadge.foreground": "#e7e7e7",
"commandCenter.border": "#15202b99",
"sash.hoverBorder": "#65c89b",
"statusBar.background": "#42b883",
"sash.hoverBorder": "#ff9396",
"statusBar.background": "#ff6064",
"statusBar.foreground": "#15202b",
"statusBarItem.hoverBackground": "#359268",
"statusBarItem.remoteBackground": "#42b883",
"statusBarItem.hoverBackground": "#ff2d32",
"statusBarItem.remoteBackground": "#ff6064",
"statusBarItem.remoteForeground": "#15202b",
"titleBar.activeBackground": "#42b883",
"titleBar.activeBackground": "#ff6064",
"titleBar.activeForeground": "#15202b",
"titleBar.inactiveBackground": "#42b88399",
"titleBar.inactiveBackground": "#ff606499",
"titleBar.inactiveForeground": "#15202b99"
},
"peacock.remoteColor": "#42b883",
"peacock.remoteColor": "#ff6064",
"docify.inlineComments": true,
"docify.moreExpressiveComments": true,
"docify.sidePanelReviewMode": false,

13
changelogs/v0-58-2.md Normal file
View File

@ -0,0 +1,13 @@
* [+6/-6][chat_server | file_server | marketplace_server | music_server | server | sync_server] [bump version](https://github.com/ragestudio/comty/commit/9f745b706988f301dffc2308ea3cfaa7ce4150cc) - by [@SrGooglo](https://github.com/srgooglo)
* [+11/-11][app | chat_server | comty.js | file_server | marketplace_server | music_server | server | sync_server | wrapper] [Bump version to 0.58.2](https://github.com/ragestudio/comty/commit/5195f019f609841e1f041ddd2d6d7357bd6cdd80) - by [@SrGooglo](https://github.com/srgooglo)
* [+28/-1][app] [add touch effect](https://github.com/ragestudio/comty/commit/a0098296885d5d059e4bf9129be1504f2a734883) - by [@SrGooglo](https://github.com/srgooglo)
* [+374/-85][app | comty.js | music_server | sync_server | classes] [Supporting multiplatform track likes](https://github.com/ragestudio/comty/commit/4add14652c387e2c23b557e3d68188888c9117c6) - by [@SrGooglo](https://github.com/srgooglo)
* [+482/-127][app] [support sliders with profiles](https://github.com/ragestudio/comty/commit/fec281dece9263dd446e7dd460f4f4af471af8b4) - by [@SrGooglo](https://github.com/srgooglo)
* [+2/-1][app] [force `max-width`](https://github.com/ragestudio/comty/commit/510ad757bb73ac4ed0d5e064105c94fd4fba6edb) - by [@SrGooglo](https://github.com/srgooglo)
* [+17/-0][v0-58-1.md] [added changelog](https://github.com/ragestudio/comty/commit/6387a22edddbb9fab47cc5f6ee893c67390d7cbb) - by [@SrGooglo](https://github.com/srgooglo)

View File

@ -54,14 +54,21 @@ services:
env_file:
- ./env_files/file.production.env
redis:
image: docker.dragonflydb.io/dragonflydb/dragonfly
image: docker.dragonflydb.io/dragonflydb/dragonfly:latest
restart: unless-stopped
ulimits:
memlock: -1
command:
- "--requirepass=changeme2424"
networks:
- internal_network
env_file:
- ./env_files/redis.env
ports:
- "6379:6379"
volumes:
- ./redis_data:/data
networks:
internal_network:

View File

@ -3,8 +3,8 @@ module.exports = {
{
name: "main_api",
script: "./dist/index.js",
// instances: "max",
// exec_mode: "cluster",
instances: "max",
exec_mode: "cluster",
env: {
"NODE_ENV": "production",
"MAIN_LISTEN_PORT": 5000,
@ -64,7 +64,7 @@ module.exports = {
{
name: "sync_api",
script: "./dist/index.js",
env:{
env: {
"NODE_ENV": "production",
"HTTP_LISTEN_PORT": 5005,
},

1
linebridge Submodule

@ -0,0 +1 @@
Subproject commit 13b01b21d6b63ecddd8625781a69355861d38db5

View File

@ -1,5 +1,6 @@
{
"defaultVars": {
"fontScale": "1",
"backgroundBlur": "10px",
"colorPrimary": "#ff6064",
"app-color": "#ff6064",

View File

@ -1,6 +1,8 @@
import React from "react"
import * as antd from "antd"
import { version as linebridgeVersion } from "linebridge/package.json"
import { Icons } from "components/Icons"
import config from "config"
@ -13,27 +15,6 @@ const connectionsTooltipStrings = {
warning: "This connection is secure but the server cannot be verified on the trusted certificate authority.",
}
const Footer = () => {
const isDevMode = window.__evite?.env?.NODE_ENV !== "production"
return <div className="footer">
<div>
<div>{config.app?.siteName}</div>
<div>
<antd.Tag>
<Icons.Tag />v{window.app.version}
</antd.Tag>
</div>
<div>
<antd.Tag color={isDevMode ? "magenta" : "green"}>
{isDevMode ? <Icons.Triangle /> : <Icons.Box />}
{isDevMode ? "development" : "production"}
</antd.Tag>
</div>
</div>
</div>
}
const latencyToColor = (latency, type) => {
switch (type) {
case "http": {
@ -98,19 +79,6 @@ export default {
}
}
const fetchServerHealth = async () => {
const response = await app.cores.api.customRequest({
method: "GET",
url: "/server/health",
}).catch(() => null)
console.log(response.data)
if (response) {
setServerHealth(response.data)
}
}
const measurePing = async () => {
const result = await app.cores.api.measurePing()
@ -123,13 +91,11 @@ export default {
checkServerVersion()
checkServerOrigin()
fetchServerHealth()
measurePing()
setCapacitorInfo()
const measureInterval = setInterval(() => {
fetchServerHealth()
measurePing()
}, 3000)
@ -237,20 +203,6 @@ export default {
</div>
</div>
<div className="field">
<div className="field_header">
<h3><Icons.MdDataUsage /> Instance usage</h3>
</div>
<div className="field_value">
<antd.Progress
percent={serverHealth?.cpuUsage.percent ?? 0}
status="active"
showInfo={false}
/>
</div>
</div>
<div className="inline_field">
<div className="field_header">
<div className="field_icon">
@ -287,7 +239,21 @@ export default {
<Icons.MdInfo />
</div>
<p>Evite Engine</p>
<p>Linebridge Engine</p>
</div>
<div className="field_value">
{linebridgeVersion ?? globalThis._linebrige_version ?? "Unknown"}
</div>
</div>
<div className="inline_field">
<div className="field_header">
<div className="field_icon">
<Icons.MdInfo />
</div>
<p>Evite Framework</p>
</div>
<div className="field_value">

View File

@ -103,6 +103,32 @@ export default {
},
storaged: true
},
{
id: "style.uiFontScale",
group: "aspect",
component: "Slider",
title: "UI font scale",
description: "Change the font scale of the application.",
props: {
min: 1,
max: 1.2,
step: 0.01,
tooltip: {
formatter: (value) => `${value}x`
}
},
defaultValue: () => {
return app.cores.style.getValue("fontScale")
},
onUpdate: (value) => {
app.cores.style.modify({
"fontScale": value
})
return value
},
storaged: true
},
{
id: "style.uiFont",
group: "aspect",

View File

@ -66,7 +66,7 @@
"jsmediatags": "^3.9.7",
"jwt-decode": "3.1.2",
"less": "4.1.2",
"linebridge": "0.15.7",
"linebridge": "0.16.0",
"lottie-react": "^2.4.0",
"lru-cache": "^10.0.0",
"luxon": "^3.0.4",
@ -115,7 +115,6 @@
"tone": "^14.7.77",
"tseep": "^1.1.1",
"ua-parser-js": "^1.0.36",
"uuid": "^9.0.0",
"virtual-scroller": "^1.12.3",
"wait-on": "^6.0.1"
},

View File

@ -408,7 +408,10 @@ class ComtyApp extends React.Component {
app.eventBus.emit("layout.animations.fadeIn")
},
"auth:logout_success": async () => {
app.cores.api.disconnectWebsockets()
app.navigation.goAuth()
await this.flushState()
},
"session.invalid": async (error) => {

View File

@ -11,7 +11,6 @@ export default (props) => {
const fetchConnectedFriends = async () => {
const result = await User.getConnectedUsersFollowing().catch((err) => {
console.error(err)
app.message.error("Failed to fetch connected friends")
return null
})
@ -47,14 +46,16 @@ export default (props) => {
return <div className="connectedFriends">
{
connectedUsers.length === 0 ?
<span>No connected friends</span> :
connectedUsers.map((user_id, index) => {
connectedUsers.length === 0 && <span>No connected friends</span>
}
{
connectedUsers.length > 0 && connectedUsers.map((user, index) => {
return <div
key={index}
className="item"
>
<UserPreview user_id={user_id} />
<UserPreview user_id={user._id} />
</div>
})
}

View File

@ -28,6 +28,7 @@
h1 {
font-family: "Space Grotesk", sans-serif;
margin: 0;
font-size: 1.8rem;
}
h3 {

View File

@ -17,7 +17,7 @@ const ExtraActions = (props) => {
app.isMobile && <Button
type="ghost"
icon={<Icons.MdAbc />}
disabled={!ctx.track_manifest.lyricsEnabled}
disabled={!ctx.track_manifest?.lyricsEnabled}
/>
}
{

View File

@ -154,7 +154,7 @@ export default class PostCard extends React.PureComponent {
}
render() {
return <div
return <article
key={this.props.index}
id={this.props.data._id}
style={this.props.style}
@ -211,6 +211,6 @@ export default class PostCard extends React.PureComponent {
post_id={this.props.data._id}
visible={this.state.open}
/>
</div>
</article>
}
}

View File

@ -121,7 +121,7 @@ function extendsWidgetClass(parentClass, ctx) {
const generateRemoteComponent = (props) => {
return lodable(async () => {
try {
let virtualModule = await import(props.url)
let virtualModule = await import(/* @vite-ignore */props.url)
virtualModule = virtualModule.default

View File

@ -5,7 +5,7 @@ import createClient from "comty.js"
import measurePing from "comty.js/handlers/measurePing"
import request from "comty.js/handlers/request"
import useRequest from "comty.js/hooks/useRequest"
import { reconnectWebsockets } from "comty.js"
import { reconnectWebsockets, disconnectWebsockets } from "comty.js"
export default class APICore extends Core {
static namespace = "api"
@ -25,6 +25,7 @@ export default class APICore extends Core {
measurePing: measurePing,
useRequest: useRequest,
reconnectWebsockets: reconnectWebsockets,
disconnectWebsockets: disconnectWebsockets,
}
listenEvent(key, handler, instance) {

View File

@ -9,7 +9,35 @@
*:before,
*:after {
box-sizing: inherit;
//font-size: calc(1rem * var(--fontScale));
font-size: calc(1em * var(--fontScale));
}
h1 {
font-size: calc(1.4em * var(--fontScale));
}
h2 {
font-size: calc(1.2em * var(--fontScale));
}
h3 {
font-size: calc(1em * var(--fontScale));
}
h4 {
font-size: calc(1em * var(--fontScale));
}
h5 {
font-size: calc(1em * var(--fontScale));
}
h6 {
font-size: calc(1em * var(--fontScale));
}
p {
font-size: calc(1em * var(--fontScale));
}
#nprogress {
@ -162,7 +190,6 @@ svg {
/* disable the IOS popup when long-press on a link */
}
// LAYOUT
.app_layout {
position: relative;

View File

@ -3,8 +3,8 @@
"version": "0.58.2",
"main": "dist/index.js",
"scripts": {
"build": "corenode-cli build",
"dev": "nodemon --ignore dist/ --exec corenode-node ./src/index.js",
"build": "hermes build",
"dev": "nodemon --ignore dist/ --exec hermes-node ./src/index.js",
"run:prod": "cross-env NODE_ENV=production node ./dist/index.js"
},
"shared": {
@ -20,15 +20,10 @@
"license": "MIT",
"dependencies": {
"@foxify/events": "^2.1.0",
"@socket.io/cluster-adapter": "^0.2.2",
"@socket.io/sticky": "^1.0.3",
"@socket.io/redis-adapter": "^8.2.1",
"@socket.io/redis-emitter": "^5.1.0",
"axios": "^1.4.0",
"bcrypt": "5.0.1",
"comty.js": "^0.58.2",
"connect-mongo": "^4.6.0",
"corenode": "0.28.26",
"cors": "^2.8.5",
"dotenv": "^16.0.3",
"express": "^4.18.2",
@ -40,11 +35,11 @@
"moment-timezone": "0.5.37",
"mongoose": "^6.9.0",
"morgan": "^1.10.0",
"nanoid": "3.2.0",
"redis": "^4.6.6",
"socket.io": "^4.5.4"
},
"devDependencies": {
"@ragestudio/hermes": "^0.1.0",
"cross-env": "^7.0.3",
"nodemon": "^2.0.15"
}

View File

@ -1,3 +1,3 @@
import path from "path"
global.FORCE_ENV = "prod"
require(path.resolve(process.cwd(), "../../shared/lib/api_wrapper"))

View File

@ -23,7 +23,7 @@ export default async (socket, next) => {
return next(new Error(`auth:token_invalid`))
}
const session = validation.session
const session = validation.data
const userData = await global.comty.rest.user.data({
user_id: session.user_id,

View File

@ -4,7 +4,7 @@
"main": "./dist/index.js",
"author": "RageStudio <support@ragestudio.net>",
"scripts": {
"build": "corenode-cli build"
"build": "hermes build"
},
"files": [
"dist"
@ -20,6 +20,7 @@
"socket.io-client": "^4.6.1"
},
"devDependencies": {
"@ragestudio/hermes": "^0.1.0",
"corenode": "^0.28.26"
}
}

View File

@ -7,6 +7,12 @@ export default async () => {
new Promise(async (resolve) => {
const start = Date.now()
const failTimeout = setTimeout(() => {
timings.http = "failed"
resolve()
}, 10000)
request({
method: "GET",
url: "/ping",
@ -15,35 +21,34 @@ export default async () => {
// set http timing in ms
timings.http = Date.now() - start
failTimeout && clearTimeout(failTimeout)
resolve()
})
.catch(() => {
timings.http = "failed"
resolve()
})
setTimeout(() => {
timings.http = "failed"
resolve()
}, 10000)
}),
new Promise((resolve) => {
const start = Date.now()
const failTimeout = setTimeout(() => {
timings.ws = "failed"
resolve()
}, 10000)
__comty_shared_state.wsInstances["default"].on("pong", () => {
timings.ws = Date.now() - start
failTimeout && clearTimeout(failTimeout)
resolve()
})
__comty_shared_state.wsInstances["default"].emit("ping")
setTimeout(() => {
timings.ws = "failed"
resolve()
}, 10000)
})
]

View File

@ -67,7 +67,7 @@ export async function createWebsockets() {
if (remotes[key].useClassicAuth && remotes[key].noAuth !== true) {
// try to auth
instance.emit("authenticate", {
instance.emit("auth", {
token: SessionModel.token,
})
}
@ -95,6 +95,16 @@ export async function createWebsockets() {
}
}
export async function disconnectWebsockets() {
const instances = globalThis.__comty_shared_state.wsInstances
for (let [key, instance] of Object.entries(instances)) {
if (instance.connected) {
instance.disconnect()
}
}
}
export async function reconnectWebsockets({ force = false } = {}) {
const instances = globalThis.__comty_shared_state.wsInstances

View File

@ -50,8 +50,6 @@ export default {
default: {
origin: composeRemote("default"),
hasWebsocket: true,
useClassicAuth: true,
autoconnect: true,
},
chat: {
origin: composeRemote("chat"),
@ -63,9 +61,11 @@ export default {
},
livestreaming: {
origin: composeRemote("livestreaming"),
hasWebsocket: false,
},
marketplace: {
origin: composeRemote("marketplace"),
hasWebsocket: false,
},
files: {
origin: composeRemote("files"),

View File

@ -3,8 +3,8 @@
"version": "0.58.2",
"main": "dist/index.js",
"scripts": {
"build": "corenode-cli build",
"dev": "cross-env NODE_ENV=development nodemon --ignore dist/ --exec corenode-node ./src/index.js",
"build": "hermes build",
"dev": "cross-env NODE_ENV=development nodemon --ignore dist/ --exec hermes-node ./src/index.js",
"run:prod": "cross-env NODE_ENV=production node ./dist/index.js"
},
"shared": {
@ -28,7 +28,6 @@
"comty.js": "^0.58.2",
"connect-mongo": "^4.6.0",
"content-range": "^2.0.2",
"corenode": "0.28.26",
"cors": "^2.8.5",
"dotenv": "^16.0.3",
"express": "^4.18.2",
@ -45,7 +44,6 @@
"moment-timezone": "^0.5.40",
"mongoose": "^6.9.0",
"morgan": "^1.10.0",
"nanoid": "3.2.0",
"normalize-url": "^8.0.0",
"p-map": "4.0.0",
"p-queue": "^7.3.4",
@ -53,6 +51,7 @@
"split-chunk-merge": "^1.0.0"
},
"devDependencies": {
"@ragestudio/hermes": "^0.1.0",
"chai": "^4.3.7",
"cross-env": "^7.0.3",
"mocha": "^10.2.0",

View File

@ -1,3 +1,4 @@
import path from "path"
global.FORCE_ENV = "prod"
require(path.resolve(process.cwd(), "../../shared/lib/api_wrapper"))

View File

@ -19,7 +19,7 @@ export default async function (req, res, next) {
return res.status(401).json({ error: "Unauthorized" })
}
req.session = validation.session
req.session = validation.data
return next()
}

View File

@ -16,7 +16,7 @@ export default function (req, res, next) {
}
})
.then((validation) => {
req.session = validation.session
req.session = validation.data
next()
})

View File

@ -23,7 +23,7 @@ export default async (socket, next) => {
return next(new Error(`auth:token_invalid`))
}
const session = validation.session
const session = validation.data
const userData = await global.comty.rest.user.data({
user_id: session.user_id,

View File

@ -3,8 +3,8 @@
"version": "0.58.2",
"main": "dist/index.js",
"scripts": {
"build": "corenode-cli build",
"dev": "cross-env NODE_ENV=development nodemon --ignore dist/ --exec corenode-node ./src/index.js",
"build": "hermes build",
"dev": "cross-env NODE_ENV=development nodemon --ignore dist/ --exec hermes-node ./src/index.js",
"run:prod": "cross-env NODE_ENV=production node ./dist/index.js"
},
"shared": {
@ -20,7 +20,6 @@
"license": "MIT",
"dependencies": {
"7zip-min": "^1.4.4",
"@corenode/utils": "0.28.26",
"@foxify/events": "^2.1.0",
"@octokit/rest": "^19.0.7",
"axios": "^1.2.5",
@ -29,7 +28,6 @@
"comty.js": "^0.58.2",
"connect-mongo": "^4.6.0",
"content-range": "^2.0.2",
"corenode": "0.28.26",
"dotenv": "^16.0.3",
"form-data": "^4.0.0",
"formidable": "^2.1.1",
@ -54,6 +52,8 @@
"uglify-js": "^3.17.4"
},
"devDependencies": {
"@corenode/utils": "0.28.26",
"@ragestudio/hermes": "^0.1.0",
"chai": "^4.3.7",
"cross-env": "^7.0.3",
"mocha": "^10.2.0",

View File

@ -1,3 +1,4 @@
import path from "path"
global.FORCE_ENV = "prod"
require(path.resolve(process.cwd(), "../../shared/lib/api_wrapper"))

View File

@ -19,7 +19,7 @@ export default async function (req, res, next) {
return res.status(401).json({ error: "Unauthorized" })
}
req.session = validation.session
req.session = validation.data
return true
}

View File

@ -3,8 +3,8 @@
"version": "0.58.2",
"main": "dist/index.js",
"scripts": {
"build": "corenode-cli build",
"dev": "nodemon --ignore dist/ --exec corenode-node ./src/index.js",
"build": "hermes build",
"dev": "nodemon --ignore dist/ --exec hermes-node ./src/index.js",
"run:prod": "cross-env NODE_ENV=production node ./dist/index.js"
},
"shared": {
@ -21,10 +21,7 @@
"license": "MIT",
"dependencies": {
"@foxify/events": "^2.1.0",
"@socket.io/redis-adapter": "^8.2.1",
"@socket.io/redis-emitter": "^5.1.0",
"@socket.io/cluster-adapter": "^0.2.2",
"@socket.io/sticky": "^1.0.3",
"@ragestudio/hermes": "^0.1.0",
"axios": "^1.4.0",
"bcrypt": "5.0.1",
"comty.js": "^0.58.2",
@ -41,7 +38,6 @@
"moment-timezone": "0.5.37",
"mongoose": "^6.9.0",
"morgan": "^1.10.0",
"nanoid": "3.2.0",
"redis": "^4.6.6",
"socket.io": "^4.5.4"
},

View File

@ -1,3 +1,4 @@
import path from "path"
global.FORCE_ENV = "prod"
require(path.resolve(process.cwd(), "../../shared/lib/api_wrapper"))

View File

@ -19,7 +19,7 @@ export default async function (req, res, next) {
return res.status(401).json({ error: "Unauthorized" })
}
req.session = validation.session
req.session = validation.data
return next()
}

View File

@ -20,7 +20,7 @@ export default async function (req, res, next) {
}
req.sessionToken = auth
req.session = validation.session
req.session = validation.data
return next()
}

View File

@ -23,7 +23,7 @@ export default async (socket, next) => {
return next(new Error(`auth:token_invalid`))
}
const session = validation.session
const session = validation.data
const userData = await global.comty.rest.user.data({
user_id: session.user_id,

View File

@ -1,7 +0,0 @@
{
"workspaceId": "64519574a8a691b55e8b361d",
"defaultEnvironment": "dev",
"gitBranchToEnvironmentMapping": {
"master": "prod"
}
}

View File

@ -3,11 +3,12 @@
"version": "0.58.2",
"main": "dist/index.js",
"scripts": {
"build": "corenode-cli build",
"dev": "cross-env NODE_ENV=development nodemon --ignore dist/ --exec corenode-node ./src/index.js",
"build": "hermes build",
"dev": "cross-env NODE_ENV=development nodemon --ignore dist/ --exec hermes-node ./src/index.js",
"run:prod": "cross-env NODE_ENV=production node ./dist/index.js"
},
"shared": {
"classes/RTEngineServer": "src/shared-classes/RTEngineServer",
"classes/FileUpload": "src/shared-classes/FileUpload",
"classes/CacheService": "src/shared-classes/CacheService",
"classes/ComtyClient": "src/shared-classes/ComtyClient",
@ -19,51 +20,34 @@
},
"license": "MIT",
"dependencies": {
"@corenode/utils": "0.28.26",
"@foxify/events": "^2.1.0",
"@jimp/plugin-scale": "^0.22.7",
"@octokit/rest": "^19.0.7",
"@socket.io/cluster-adapter": "^0.2.2",
"@socket.io/redis-adapter": "^8.2.1",
"@socket.io/redis-emitter": "^5.1.0",
"@socket.io/sticky": "^1.0.3",
"@tensorflow/tfjs-node": "4.0.0",
"axios": "^1.2.5",
"bcrypt": "^5.1.0",
"busboy": "^1.6.0",
"comty.js": "^0.58.2",
"connect-mongo": "^4.6.0",
"content-range": "^2.0.2",
"corenode": "0.28.26",
"dicebar_lib": "1.0.1",
"dotenv": "^16.0.3",
"fluent-ffmpeg": "^2.1.2",
"formidable": "^2.1.1",
"infisical-node": "^1.2.1",
"jimp": "^0.16.2",
"jsonwebtoken": "^9.0.0",
"linebridge": "0.15.13",
"linebridge": "0.16.0",
"luxon": "^3.2.1",
"merge-files": "^0.1.2",
"mime-types": "^2.1.35",
"minio": "^7.0.32",
"moment": "^2.29.4",
"moment-timezone": "^0.5.40",
"mongoose": "^6.9.0",
"music-metadata": "^8.1.3",
"normalize-url": "^8.0.0",
"nsfwjs": "2.4.2",
"p-map": "4",
"p-queue": "^7.3.4",
"passport": "^0.6.0",
"passport-jwt": "^4.0.1",
"passport-local": "^1.0.0",
"path-to-regexp": "^6.2.1",
"peer": "^1.0.0",
"sharp": "^0.31.3",
"split-chunk-merge": "^1.0.0"
"path-to-regexp": "^6.2.1"
},
"devDependencies": {
"@ragestudio/hermes": "^0.1.0",
"@corenode/utils": "0.28.26",
"chai": "^4.3.7",
"cross-env": "^7.0.3",
"mocha": "^10.2.0",

View File

@ -1,314 +0,0 @@
import path from "path"
import fs from "fs"
import { Server } from "linebridge/dist/server"
import express from "express"
import bcrypt from "bcrypt"
import passport from "passport"
import jwt from "jsonwebtoken"
import EventEmitter from "@foxify/events"
import { User, Session, Config } from "@shared-classes/DbModels"
import DbManager from "@shared-classes/DbManager"
import RedisClient from "@shared-classes/RedisClient"
import StorageClient from "@shared-classes/StorageClient"
import internalEvents from "./events"
const ExtractJwt = require("passport-jwt").ExtractJwt
const LocalStrategy = require("passport-local").Strategy
global.signLocation = process.env.signLocation
export default class API {
server = global.server = new Server({
name: "Main-API",
minimal: true,
listen_port: process.env.MAIN_LISTEN_PORT ?? 3000,
onWSClientConnection: (...args) => {
this.onWSClientConnection(...args)
},
onWSClientDisconnect: (...args) => {
this.onWSClientDisconnect(...args)
},
},
require("@controllers"),
require("@middlewares"),
{
"Access-Control-Allow-Methods": "GET, POST, PUT, DELETE, OPTIONS",
"Access-Control-Allow-Headers": "Content-Type, Authorization, Content-Length, X-Requested-With, X-Access-Token, X-Refresh-Token, server_token",
"Access-Control-Expose-Headers": "regenerated_token",
},
)
redis = global.redis = RedisClient({
withWsAdapter: true
})
DB = new DbManager()
eventBus = global.eventBus = new EventEmitter()
storage = global.storage = StorageClient()
jwtStrategy = global.jwtStrategy = {
jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
secretOrKey: process.env.SERVER_TOKEN ?? "secret",
algorithms: ["sha1", "RS256", "HS256"],
expiresIn: process.env.signLifetime ?? "1h",
enforceRegenerationTokenExpiration: false,
}
constructor() {
this.server.engine_instance.use(express.json())
this.server.engine_instance.use(express.urlencoded({ extended: true }))
this.server.websocket_instance["clients"] = []
this.server.websocket_instance["findUserIdFromClientID"] = (searchClientId) => {
return this.server.websocket_instance.clients.find(client => client.id === searchClientId)?.userId ?? false
}
this.server.websocket_instance["getClientSockets"] = (userId) => {
return this.server.websocket_instance.clients.filter(client => client.userId === userId).map((client) => {
return client?.socket
})
}
this.server.websocket_instance["broadcast"] = async (channel, ...args) => {
for await (const client of this.server.websocket_instance.clients) {
client.socket.emit(channel, ...args)
}
}
global.websocket_instance = this.server.websocket_instance
global.uploadCachePath = process.env.uploadCachePath ?? path.resolve(process.cwd(), "cache")
if (!fs.existsSync(global.uploadCachePath)) {
fs.mkdirSync(global.uploadCachePath, {
recursive: true,
})
}
global.DEFAULT_POSTING_POLICY = {
maxMessageLength: 512,
acceptedMimeTypes: [
"application/octet-stream",
"image/jpg",
"image/jpeg",
"image/png",
"image/gif",
"audio/mp3",
"audio/mpeg",
"audio/ogg",
"audio/wav",
"audio/flac",
"video/mp4",
"video/mkv",
"video/webm",
"video/quicktime",
"video/x-msvideo",
"video/x-ms-wmv",
],
maximumFileSize: 80 * 1024 * 1024,
maximunFilesPerRequest: 20,
}
// register internal events
for (const [eventName, eventHandler] of Object.entries(internalEvents)) {
this.eventBus.on(eventName, eventHandler)
}
}
events = internalEvents
async initialize() {
await this.redis.initialize()
await this.DB.initialize()
await this.initializeConfigDB()
await this.storage.initialize()
await this.checkSetup()
await this.initPassport()
await this.initWebsockets()
await this.server.initialize()
}
initializeConfigDB = async () => {
let serverConfig = await Config.findOne({ key: "server" }).catch(() => {
return false
})
if (!serverConfig) {
serverConfig = new Config({
key: "server",
value: {
setup: false,
},
})
await serverConfig.save()
}
}
checkSetup = async () => {
return new Promise(async (resolve, reject) => {
let setupOk = (await Config.findOne({ key: "server" })).value?.setup ?? false
if (!setupOk) {
console.log("⚠️ Server setup is not complete, running setup proccess.")
let setupScript = await import("./setup")
setupScript = setupScript.default ?? setupScript
try {
for await (let script of setupScript) {
await script()
}
console.log("✅ Server setup complete.")
await Config.updateOne({ key: "server" }, { value: { setup: true } })
return resolve()
} catch (error) {
console.log("❌ Server setup failed.")
console.error(error)
process.exit(1)
}
}
return resolve()
})
}
initPassport() {
this.server.middlewares["useJwtStrategy"] = (req, res, next) => {
req.jwtStrategy = this.jwtStrategy
next()
}
passport.use(new LocalStrategy({
usernameField: "username",
passwordField: "password",
session: false
}, (username, password, done) => {
// check if username is a email with regex
let isEmail = username.match(/^[^\s@]+@[^\s@]+\.[^\s@]+$/)
let query = isEmail ? { email: username } : { username: username }
User.findOne(query).select("+password")
.then((data) => {
if (data === null) {
return done(null, false, this.jwtStrategy)
} else if (!bcrypt.compareSync(password, data.password)) {
return done(null, false, this.jwtStrategy)
}
// create a token
return done(null, data, this.jwtStrategy, { username, password })
})
.catch(err => done(err, null, this.jwtStrategy))
}))
this.server.engine_instance.use(passport.initialize())
}
initWebsockets() {
const onAuthenticated = async (socket, userData) => {
await this.attachClientSocket(socket, userData)
return socket.emit("authenticated")
}
const onAuthenticatedFailed = async (socket, error) => {
await this.detachClientSocket(socket)
return socket.emit("authenticateFailed", {
error,
})
}
if (this.redis.ioAdapter) {
this.server.websocket_instance.io.adapter(this.redis.ioAdapter)
}
this.server.websocket_instance.eventsChannels.push(["/main", "ping", async (socket) => {
return socket.emit("pong")
}])
this.server.websocket_instance.eventsChannels.push(["/main", "authenticate", async (socket, authPayload) => {
if (!authPayload) {
return onAuthenticatedFailed(socket, "missing_auth_payload")
}
const session = await Session.findOne({ token: authPayload.token }).catch((err) => {
return false
})
if (!session) {
return onAuthenticatedFailed(socket, "Session not found")
}
await jwt.verify(authPayload.token, this.jwtStrategy.secretOrKey, async (err, decoded) => {
if (err) {
return onAuthenticatedFailed(socket, err)
}
const userData = await User.findById(decoded.user_id).catch((err) => {
return false
})
if (!userData) {
return onAuthenticatedFailed(socket, "User not found")
}
return onAuthenticated(socket, userData)
})
}])
}
onWSClientConnection = async (socket) => {
console.log(`🌐 Client connected: ${socket.id}`)
}
onWSClientDisconnect = async (socket) => {
console.log(`🌐 Client disconnected: ${socket.id}`)
this.detachClientSocket(socket)
}
attachClientSocket = async (socket, userData) => {
const client = this.server.websocket_instance.clients.find(c => c.id === socket.id)
if (client) {
client.socket.disconnect()
}
const clientObj = {
id: socket.id,
socket: socket,
user_id: userData._id.toString(),
}
this.server.websocket_instance.clients.push(clientObj)
console.log(`📣 Client [${socket.id}] authenticated as ${userData.username}`)
this.eventBus.emit("user.connected", clientObj.user_id)
}
detachClientSocket = async (socket) => {
const client = this.server.websocket_instance.clients.find(c => c.id === socket.id)
if (client) {
this.server.websocket_instance.clients = this.server.websocket_instance.clients.filter(c => c.id !== socket.id)
console.log(`📣🔴 Client [${socket.id}] authenticated as ${client.user_id} disconnected`)
this.eventBus.emit("user.disconnected", client.user_id)
}
}
}

View File

@ -23,10 +23,10 @@ export default {
await user.save()
global.websocket_instance.io.emit(`user.update`, {
global.engine.ws.io.of("/").emit(`user.update`, {
...user.toObject(),
})
global.websocket_instance.io.emit(`user.update.${targetUserId}`, {
global.engine.ws.io.of("/").emit(`user.update.${targetUserId}`, {
...user.toObject(),
})

View File

@ -1,32 +1,39 @@
import passport from "passport"
import { Token } from "@lib"
import Token from "@lib/token"
import { User } from "@shared-classes/DbModels"
import bcrypt from "bcrypt"
export default {
method: "POST",
route: "/login",
fn: async (req, res) => {
passport.authenticate("local", { session: false }, async (error, user, options) => {
if (error) {
return res.status(500).json({
message: `Error validating user > ${error.message}`,
const { username, password } = req.body
let isEmail = username.match(/^[^\s@]+@[^\s@]+\.[^\s@]+$/)
let query = isEmail ? { email: username } : { username: username }
const user = await User.findOne(query).select("+password")
if (!user) {
return res.status(401).json({
message: "Invalid credentials, user not found",
})
}
if (!user) {
if (!bcrypt.compareSync(password, user.password)) {
return res.status(401).json({
message: "Invalid credentials",
})
}
const token = await Token.createNewAuthToken({
const token = await Token.createAuth({
username: user.username,
user_id: user._id.toString(),
ip_address: req.headers["x-forwarded-for"]?.split(",")[0] ?? req.socket.remoteAddress,
client: req.headers["user-agent"],
signLocation: global.signLocation,
}, options)
})
return res.json({ token: token })
})(req, res)
}
}

View File

@ -13,7 +13,7 @@ export default {
const { message } = req.selection
try {
const comment = newComment({
const comment = await newComment({
user_id: req.user._id.toString(),
parent_id: post_id,
message: message,

View File

@ -26,8 +26,7 @@ export default async (payload) => {
await comment.delete()
global.websocket_instance.io.emit(`comment.delete.${comment_id}`)
global.websocket_instance.io.emit(`post.delete.comment.${comment.parent_id.toString()}`, comment_id)
global.engine.ws.io.of("/").emit(`post.delete.comment.${comment.parent_id.toString()}`, comment_id)
return comment
}

View File

@ -25,12 +25,7 @@ export default async (payload) => {
const userData = await User.findById(user_id)
global.websocket_instance.io.emit(`comment.new.${parent_id}`, {
...comment.toObject(),
user: userData.toObject(),
})
global.websocket_instance.io.emit(`post.new.comment.${parent_id}`, {
global.engine.ws.io.of("/").emit(`post.new.comment.${parent_id}`, {
...comment.toObject(),
user: userData.toObject(),
})

View File

@ -1,268 +0,0 @@
import fs from "fs"
import { Controller } from "linebridge/dist/server"
import ChunkedUpload from "@lib/chunkedUpload"
import uploadBodyFiles from "./services/uploadBodyFiles"
import { videoTranscode } from "@lib/videoTranscode"
import Jimp from "jimp"
const maximuns = {
imageResolution: {
width: 3840,
height: 2160,
},
imageQuality: 80,
}
async function processVideo(file, params = {}) {
const result = await videoTranscode(file.filepath, global.uploadCachePath, {
videoCodec: "libx264",
format: "mp4",
...params
})
file.filepath = result.filepath
file.filename = result.filename
return file
}
async function processImage(file) {
const { width, height } = await new Promise((resolve, reject) => {
Jimp.read(file.filepath)
.then((image) => {
resolve({
width: image.bitmap.width,
height: image.bitmap.height,
})
})
.catch((err) => {
reject(err)
})
})
if (width > maximuns.imageResolution.width || height > maximuns.imageResolution.height) {
await new Promise((resolve, reject) => {
// calculate max resolution respecting aspect ratio
const resizedResolution = {
width: maximuns.imageResolution.width,
height: maximuns.imageResolution.height,
}
if (width > height) {
resizedResolution.height = Math.floor((height / width) * maximuns.imageResolution.width)
}
if (height > width) {
resizedResolution.width = Math.floor((width / height) * maximuns.imageResolution.height)
}
Jimp.read(file.filepath)
.then((image) => {
image
.resize(resizedResolution.width, resizedResolution.height)
.quality(maximuns.imageQuality)
.write(file.filepath, resolve)
})
.catch((err) => {
reject(err)
})
})
}
return file
}
export default class FilesController extends Controller {
static refName = "FilesController"
static useRoute = "/files"
chunkUploadEngine = new ChunkedUpload({
tmpPath: global.uploadCachePath,
outputPath: global.uploadCachePath,
maxFileSize: global.DEFAULT_POSTING_POLICY.maximumFileSize,
acceptedMimeTypes: global.DEFAULT_POSTING_POLICY.acceptedMimeTypes,
onExceedMaxFileSize: (req) => {
// check if user has permission to upload big files
if (!req.user) {
return false
}
return req.user.roles.includes("admin") || req.user.roles.includes("moderator") || req.user.roles.includes("developer")
}
})
fileTransformer = {
"video/avi": processVideo,
"video/quicktime": processVideo,
"video/mp4": processVideo,
"video/webm": processVideo,
"image/jpeg": processImage,
"image/png": processImage,
"image/gif": processImage,
"image/bmp": processImage,
"image/tiff": processImage,
"image/webp": processImage,
"image/jfif": processImage,
}
httpEndpoints = {
get: {
"/objects": {
middlewares: ["withAuthentication"],
fn: async (req, res) => {
const user_id = req.user.id
let totalSize = 0
const objectsPath = `${user_id}/`
const objects = await new Promise((resolve, reject) => {
const objects = []
const objectsStream = global.storage.listObjects(global.storage.defaultBucket, objectsPath, true)
objectsStream.on("data", (obj) => {
objects.push(obj)
})
objectsStream.on("error", (err) => {
return reject(err)
})
objectsStream.on("end", () => {
return resolve(objects)
})
})
for await (const object of objects) {
totalSize += object.size
}
return res.json({
totalSize,
objects,
})
}
}
},
post: {
"/upload_chunk": {
middlewares: ["withAuthentication", this.chunkUploadEngine.makeMiddleware()],
fn: async (req, res) => {
if (!req.isLastPart) {
return res.json({
status: "ok",
filePart: req.filePart,
lastPart: req.isLastPart,
})
}
if (!req.fileResult) {
return res.status(500).json({
error: "File upload failed",
})
}
console.log(req.fileResult)
try {
// check if mimetype has transformer
if (typeof this.fileTransformer[req.fileResult.mimetype] === "function") {
req.fileResult = await this.fileTransformer[req.fileResult.mimetype](req.fileResult)
}
} catch (error) {
console.log(error)
return res.status(500).json({
error: "File upload failed on transformation",
reason: error.message,
})
}
// start upload to s3
const remoteUploadPath = req.user?._id ? `${req.user?._id.toString()}/${req.fileResult.filename}` : file.filename
const remoteUploadResponse = await new Promise((_resolve, _reject) => {
try {
const fileStream = fs.createReadStream(req.fileResult.filepath)
fs.stat(req.fileResult.filepath, (err, stats) => {
try {
if (err) {
return _reject(new Error(`Failed to upload file to storage server > ${err.message}`))
}
global.storage.putObject(global.storage.defaultBucket, remoteUploadPath, fileStream, stats.size, req.fileResult, (err, etag) => {
if (err) {
return _reject(new Error(`Failed to upload file to storage server > ${err.message}`))
}
return _resolve({
etag,
})
})
} catch (error) {
return _reject(new Error(`Failed to upload file to storage server > ${error.message}`))
}
})
} catch (error) {
return _reject(new Error(`Failed to upload file to storage server > ${error.message}`))
}
}).catch((err) => {
res.status(500).json({
error: err.message,
})
return false
})
if (!remoteUploadResponse) {
return false
}
try {
// remove file from cache
await fs.promises.unlink(req.fileResult.filepath)
// programatically remove file from cache in the
} catch (error) {
console.log("Failed to remove file from cache", error)
return res.status(500).json({
error: error.message,
})
}
// get url location
const remoteUrlObj = global.storage.composeRemoteURL(remoteUploadPath)
return res.json({
name: req.fileResult.filename,
id: remoteUploadPath,
url: remoteUrlObj,
})
}
},
"/upload": {
middlewares: ["withAuthentication"],
fn: async (req, res) => {
const results = await uploadBodyFiles({
req,
}).catch((err) => {
res.status(400).json({
error: err.message,
})
return false
})
if (results) {
return res.json(results)
}
}
}
}
}
}

View File

@ -1,247 +0,0 @@
import path from "path"
import fs from "fs"
import { videoTranscode } from "@lib/videoTranscode"
import Jimp from "jimp"
import mime from "mime-types"
import pmap from "@utils/pMap"
const formidable = require("formidable")
const maximuns = {
imageResolution: {
width: 3840,
height: 2160,
},
imageQuality: 80,
}
const handleUploadVideo = async (file, params) => {
const transcoded = await videoTranscode(file.filepath, params.cacheUploadDir)
file.filepath = transcoded.filepath
file.newFilename = path.basename(file.filepath)
return file
}
const handleImage = async (file) => {
const { width, height } = await new Promise((resolve, reject) => {
Jimp.read(file.filepath)
.then((image) => {
resolve({
width: image.bitmap.width,
height: image.bitmap.height,
})
})
.catch((err) => {
reject(err)
})
})
if (width > maximuns.imageResolution.width || height > maximuns.imageResolution.height) {
await new Promise((resolve, reject) => {
Jimp.read(file.filepath)
.then((image) => {
image
.resize(maximuns.imageResolution.width, maximuns.imageResolution.height)
.quality(maximuns.imageQuality)
.write(file.filepath, resolve)
})
.catch((err) => {
reject(err)
})
})
}
file.newFilename = path.basename(file.filepath)
return file
}
export default async (payload) => {
if (!payload) {
throw new Error("Missing payload")
}
const { req } = payload
let params = {
cacheUploadDir: global.uploadCachePath,
maxFileSize: global.DEFAULT_POSTING_POLICY.maximumFileSize,
maxFields: global.DEFAULT_POSTING_POLICY.maximunFilesPerRequest,
acceptedMimeTypes: global.DEFAULT_POSTING_POLICY.acceptedMimeTypes,
}
if (payload.params) {
params = {
...params,
...payload.params,
}
}
const processedFiles = []
const failedFiles = []
let queuePromieses = []
// check directories exist
if (!fs.existsSync(params.cacheUploadDir)) {
await fs.promises.mkdir(params.cacheUploadDir, { recursive: true })
}
// decode body form-data
const form = formidable({
multiples: true,
keepExtensions: true,
uploadDir: params.cacheUploadDir,
maxFileSize: params.maxFileSize,
maxFields: params.maxFields,
filename: (name, ext, part, form) => {
if (!ext) {
ext = `.${mime.extension(part.mimetype)}`
}
name = global.nanoid()
return name + ext
},
filter: (stream) => {
// check if is allowed mime type
if (!params.acceptedMimeTypes.includes(stream.mimetype)) {
failedFiles.push({
fileName: stream.originalFilename,
mimetype: stream.mimetype,
error: "File type not allowed",
})
return false
}
return true
}
})
const results = await new Promise((resolve, reject) => {
// create a new thread for each file
form.parse(req, async (err, fields, data) => {
if (err) {
return reject(err)
}
if (!Array.isArray(data.files)) {
data.files = [data.files]
}
for (let file of data.files) {
if (!file) continue
// create process queue
queuePromieses.push(async () => {
// check if is video need to transcode
switch (file.mimetype) {
case "video/quicktime": {
file = await handleUploadVideo(file, params)
break
}
case "image/jpeg": {
file = await handleImage(file, params)
break
}
case "image/png": {
file = await handleImage(file, params)
break
}
case "image/gif": {
file = await handleImage(file, params)
break
}
case "image/bmp": {
file = await handleImage(file, params)
break
}
case "image/tiff": {
file = await handleImage(file, params)
break
}
case "image/webp": {
file = await handleImage(file, params)
break
}
case "image/jfif": {
file = await handleImage(file, params)
break
}
default: {
// do nothing
}
}
const metadata = {
mimetype: file.mimetype,
size: file.size,
filepath: file.filepath,
filename: file.newFilename,
}
// upload path must be user_id + file.newFilename
const uploadPath = req.user?._id ? `${req.user?._id.toString()}/${file.newFilename}` : file.newFilename
// upload to s3
await new Promise((_resolve, _reject) => {
global.storage.fPutObject(global.storage.defaultBucket, uploadPath, file.filepath, metadata, (err, etag) => {
if (err) {
return _reject(new Error(`Failed to upload file to storage server > ${err.message}`))
}
return _resolve()
})
}).catch((err) => {
return reject(err)
})
// get url location
const remoteUrlObj = global.storage.composeRemoteURL(uploadPath)
// push final filepath to urls
return {
name: file.originalFilename,
id: uploadPath,
url: remoteUrlObj,
}
})
}
// wait for all files to be processed
await pmap(
queuePromieses,
async (fn) => {
const result = await fn().catch((err) => {
console.error(err)
// FIXME: add fileNames
failedFiles.push({
error: err.message,
})
return null
})
if (result) {
processedFiles.push(result)
}
},
{ concurrency: 10 }
)
return resolve({
files: processedFiles,
failed: failedFiles,
})
})
})
return results
}

View File

@ -30,10 +30,10 @@ export default async (payload) => {
await newFollow.save()
global.websocket_instance.io.emit(`user.follow`, {
global.engine.ws.io.of("/").emit(`user.follow`, {
...user.toObject(),
})
global.websocket_instance.io.emit(`user.follow.${payload.user_id}`, {
global.engine.ws.io.of("/").emit(`user.follow.${payload.user_id}`, {
...user.toObject(),
})

View File

@ -25,10 +25,10 @@ export default async (payload) => {
await follow.remove()
global.websocket_instance.io.emit(`user.unfollow`, {
global.engine.ws.io.of("/").emit(`user.unfollow`, {
...user.toObject(),
})
global.websocket_instance.io.emit(`user.unfollow.${payload.user_id}`, {
global.engine.ws.io.of("/").emit(`user.unfollow.${payload.user_id}`, {
...user.toObject(),
})

View File

@ -36,8 +36,8 @@ export default async (payload) => {
const resultPost = await getPostData({ post_id: post._id.toString() })
global.websocket_instance.io.emit(`post.new`, resultPost)
global.websocket_instance.io.emit(`post.new.${post.user_id}`, resultPost)
global.engine.ws.io.of("/").emit(`post.new`, resultPost)
global.engine.ws.io.of("/").emit(`post.new.${post.user_id}`, resultPost)
// push to background job to check if is NSFW
flagNsfwByAttachments(post._id.toString())

View File

@ -27,7 +27,7 @@ export default async (payload) => {
}
await post.remove()
global.websocket_instance.io.emit(`post.delete`, post_id)
global.engine.ws.io.of("/").emit(`post.delete`, post_id)
return post.toObject()
}

View File

@ -24,8 +24,8 @@ export default async (post_id, modification) => {
}
}
global.websocket_instance.io.emit(`post.dataUpdate`, post)
global.websocket_instance.io.emit(`post.dataUpdate.${post_id}`, post)
global.engine.ws.io.of("/").emit(`post.dataUpdate`, post)
global.engine.ws.io.of("/").emit(`post.dataUpdate.${post_id}`, post)
return post
}

View File

@ -27,7 +27,7 @@ export default async (payload) => {
await PostLike.findByIdAndDelete(likeObj._id)
}
global.websocket_instance.io.emit(`post.${post_id}.likes.update`, {
global.engine.ws.io.of("/").emit(`post.${post_id}.likes.update`, {
to,
post_id,
user_id,

View File

@ -1,13 +1,12 @@
import { Token } from "@lib"
import Token from "@lib/token"
export default {
method: "POST",
route: "/regenerate",
middlewares: ["useJwtStrategy"],
fn: async (req, res) => {
const { expiredToken, refreshToken } = req.body
const token = await Token.regenerateSession(expiredToken, refreshToken).catch((error) => {
const token = await Token.regenerate(expiredToken, refreshToken).catch((error) => {
res.status(400).json({ error: error.message })
return null

View File

@ -1,6 +1,4 @@
import jwt from "jsonwebtoken"
import { Session } from "@shared-classes/DbModels"
import Token from "@lib/token"
export default {
method: "POST",
@ -8,44 +6,7 @@ export default {
fn: async (req, res) => {
const token = req.body.token
let result = {
expired: false,
valid: true,
session: null
}
await jwt.verify(token, global.jwtStrategy.secretOrKey, async (err, decoded) => {
if (err) {
result.valid = false
result.error = err.message
if (err.message === "jwt expired") {
result.expired = true
}
return
}
result = { ...result, ...decoded }
const sessions = await Session.find({ user_id: result.user_id })
const sessionsTokens = sessions.map((session) => {
if (session.user_id === result.user_id) {
return session.token
}
})
if (!sessionsTokens.includes(token)) {
result.valid = false
result.error = "Session token not found"
} else {
result.valid = true
}
})
if (result.valid) {
result.session = await jwt.decode(token)
}
const result = await Token.validate(token)
return res.json(result)
},

View File

@ -1,25 +1,28 @@
import { UserFollow } from "@shared-classes/DbModels"
export default async (payload = {}) => {
const { from_user_id } = payload
const { from_user_id, limit = 10, offset = 0 } = payload
// TODO: Sort by latest history interaction
// get all the users that are following
const following = await UserFollow.find({
let followingUsersIds = await UserFollow.find({
user_id: from_user_id,
})
// .skip(offset)
// .limit(limit)
// check if following users are connected
const connectedUsers = []
following.forEach((follow) => {
const connectedClient = global.websocket_instance.clients.find((client) => {
return client.user_id === follow.to
followingUsersIds = followingUsersIds.map((follow) => {
return follow.to
})
if (connectedClient) {
connectedUsers.push(connectedClient.user_id)
const searchResult = await global.engine.ws.find.manyById(followingUsersIds)
// TODO: Calculate last session duration or last activity at
return searchResult.map((user) => {
return {
_id: user.user_id,
username: user.username,
}
})
return connectedUsers
}

View File

@ -46,9 +46,9 @@ export default {
})
}
global.websocket_instance.io.emit(`streaming.new`, streamingProfile)
global.engine.ws.io.of("/").emit(`streaming.new`, streamingProfile)
global.websocket_instance.io.emit(`streaming.new.${streamingProfile.user_id}`, streamingProfile)
global.engine.ws.io.of("/").emit(`streaming.new.${streamingProfile.user_id}`, streamingProfile)
return res.json({
code: 0,

View File

@ -11,9 +11,9 @@ export default {
})
if (streamingProfile) {
global.websocket_instance.io.emit(`streaming.end`, streamingProfile)
global.engine.ws.io.of("/").emit(`streaming.end`, streamingProfile)
global.websocket_instance.io.emit(`streaming.end.${streamingProfile.user_id}`, streamingProfile)
global.engine.ws.io.of("/").emit(`streaming.end.${streamingProfile.user_id}`, streamingProfile)
return res.json({
code: 0,

View File

@ -22,10 +22,10 @@ export default async (payload) => {
await user.save()
global.websocket_instance.io.emit(`user.update`, {
global.engine.ws.io.of("/").emit(`user.update`, {
...user.toObject(),
})
global.websocket_instance.io.emit(`user.update.${payload.user_id}`, {
global.engine.ws.io.of("/").emit(`user.update.${payload.user_id}`, {
...user.toObject(),
})

View File

@ -16,7 +16,6 @@ export { default as StreamingController } from "./StreamingController"
export { default as BadgesController } from "./BadgesController"
export { default as FeaturedEventsController } from "./FeaturedEventsController" // Needs to migrate to lb 0.15
export { default as FilesController } from "./FilesController" // Needs to migrate to lb 0.15
export { default as RolesController } from "./RolesController" // Needs to migrate to lb 0.15
export { default as SearchController } from "./SearchController" // Needs to migrate to lb 0.15

View File

@ -1,3 +1,153 @@
import path from "path"
import Boot from "linebridge/bootstrap"
import { Server } from "linebridge/dist/server"
require(path.resolve(process.cwd(), "../../shared/lib/api_wrapper"))
import EventEmitter from "@foxify/events"
import { Config, User } from "@shared-classes/DbModels"
import DbManager from "@shared-classes/DbManager"
import RedisClient from "@shared-classes/RedisClient"
import StorageClient from "@shared-classes/StorageClient"
import Token from "@lib/token"
import internalEvents from "./events"
export default class API extends Server {
static refName = "MAIN-API"
static listen_port = 3010
static requireWSAuth = true
constructor(params) {
super(params)
global.DEFAULT_POSTING_POLICY = {
maxMessageLength: 512,
maximumFileSize: 80 * 1024 * 1024,
maximunFilesPerRequest: 20,
}
global.jwtStrategy = {
secretOrKey: process.env.JWT_SECRET,
expiresIn: "1h",
algorithm: "HS256",
enforceRegenerationTokenExpiration: false,
}
}
middlewares = require("@middlewares")
controllers = require("@controllers")
redis = global.redis = RedisClient({
withWsAdapter: true
})
DB = new DbManager()
eventBus = new EventEmitter()
storage = global.storage = StorageClient()
events = internalEvents
async onInitialize() {
for (const [eventName, eventHandler] of Object.entries(internalEvents)) {
this.eventBus.on(eventName, eventHandler)
}
await this.redis.initialize()
await this.DB.initialize()
await this.initializeConfigDB()
await this.storage.initialize()
await this.checkSetup()
}
initializeConfigDB = async () => {
let serverConfig = await Config.findOne({ key: "server" }).catch(() => {
return false
})
if (!serverConfig) {
serverConfig = new Config({
key: "server",
value: {
setup: false,
},
})
await serverConfig.save()
}
}
checkSetup = async () => {
return new Promise(async (resolve, reject) => {
let setupOk = (await Config.findOne({ key: "server" })).value?.setup ?? false
if (!setupOk) {
console.log("⚠️ Server setup is not complete, running setup proccess.")
let setupScript = await import("./setup")
setupScript = setupScript.default ?? setupScript
try {
for await (let script of setupScript) {
await script()
}
console.log("✅ Server setup complete.")
await Config.updateOne({ key: "server" }, { value: { setup: true } })
return resolve()
} catch (error) {
console.log("❌ Server setup failed.")
console.error(error)
process.exit(1)
}
}
return resolve()
})
}
handleWsAuth = async (socket, token, err) => {
try {
const validation = await Token.validate(token)
if (!validation.valid) {
if (validation.error) {
return err(`auth:server_error`)
}
return err(`auth:token_invalid`)
}
const userData = await User.findById(validation.data.user_id).catch((err) => {
console.error(`[${socket.id}] failed to get user data caused by server error`, err)
return null
})
if (!userData) {
return err(`auth:user_failed`)
}
socket.userData = userData
socket.token = token
socket.session = validation.data
return {
token: token,
username: userData.username,
user_id: userData._id,
}
} catch (error) {
return err(`auth:authentification_failed`, error)
}
}
}
Boot(API)

View File

@ -1,7 +1,55 @@
import jwt from "jsonwebtoken"
import { Session, RegenerationToken } from "@shared-classes/DbModels"
export async function regenerateSession(expiredToken, refreshToken, aggregateData = {}) {
export default class Token {
static async validate(token) {
if (typeof token === "undefined") {
throw new Error("Token is undefined")
}
let result = {
expired: false,
valid: true,
data: null
}
await jwt.verify(token, global.jwtStrategy.secretOrKey, async (err, decoded) => {
if (err) {
result.valid = false
result.error = err.message
if (err.message === "jwt expired") {
result.expired = true
}
return
}
result = { ...result, ...decoded }
const sessions = await Session.find({ user_id: result.user_id })
const sessionsTokens = sessions.map((session) => {
if (session.user_id === result.user_id) {
return session.token
}
})
if (!sessionsTokens.includes(token)) {
result.valid = false
result.error = "Session token not found"
} else {
result.valid = true
}
})
if (result.valid) {
result.data = await jwt.decode(token)
}
return result
}
static async regenerate(expiredToken, refreshToken, aggregateData = {}) {
// search for a regeneration token with the expired token (Should exist only one)
const regenerationToken = await RegenerationToken.findOne({ refreshToken: refreshToken })
@ -73,15 +121,50 @@ export async function regenerateSession(expiredToken, refreshToken, aggregateDat
await RegenerationToken.deleteOne({ refreshToken: refreshToken })
return newToken
}
}
export async function getRegenerationToken(expiredToken) {
const regenerationToken = await RegenerationToken.findOne({ expiredToken }).catch((error) => false)
static async createAuth(payload, options = {}) {
if (options.updateSession) {
const sessionData = await Session.findOne({ _id: options.updateSession })
return regenerationToken ?? false
}
payload.session_uuid = sessionData.session_uuid
} else {
payload.session_uuid = global.nanoid()
}
export async function createNewRegenerationToken(expiredToken) {
const token = jwt.sign({
session_uuid: payload.session_uuid,
username: payload.username,
user_id: payload.user_id,
signLocation: payload.signLocation,
}, global.jwtStrategy.secretOrKey, {
expiresIn: global.jwtStrategy.expiresIn ?? "1h",
algorithm: global.jwtStrategy.algorithm ?? "HS256"
})
const session = {
token: token,
session_uuid: payload.session_uuid,
username: payload.username,
user_id: payload.user_id,
location: payload.signLocation,
ip_address: payload.ip_address,
client: payload.client,
date: new Date().getTime(),
}
if (options.updateSession) {
await Session.findByIdAndUpdate(options.updateSession, session)
} else {
let newSession = new Session(session)
await newSession.save()
}
return token
}
static async createRegenerative(expiredToken) {
// check if token is only expired, if is corrupted, reject
let decoded = null
@ -124,45 +207,11 @@ export async function createNewRegenerationToken(expiredToken) {
// return the regeneration token
return regenerationToken
}
export async function createNewAuthToken(payload, options = {}) {
if (options.updateSession) {
const sessionData = await Session.findOne({ _id: options.updateSession })
payload.session_uuid = sessionData.session_uuid
} else {
payload.session_uuid = global.nanoid()
}
const token = jwt.sign({
session_uuid: payload.session_uuid,
username: payload.username,
user_id: payload.user_id,
signLocation: payload.signLocation,
}, global.jwtStrategy.secretOrKey, {
expiresIn: global.jwtStrategy.expiresIn ?? "1h",
algorithm: global.jwtStrategy.algorithm ?? "HS256"
})
const session = {
token: token,
session_uuid: payload.session_uuid,
username: payload.username,
user_id: payload.user_id,
location: payload.signLocation,
ip_address: payload.ip_address,
client: payload.client,
date: new Date().getTime(),
}
if (options.updateSession) {
await Session.findByIdAndUpdate(options.updateSession, session)
} else {
let newSession = new Session(session)
await newSession.save()
}
return token
}
static async getRegenerationToken(expiredToken) {
const regenerationToken = await RegenerationToken.findOne({ expiredToken })
return regenerationToken
}
}

View File

@ -0,0 +1 @@
/opt/comty-federated/@public/shared/classes/RTEngineServer

View File

@ -1,4 +1,4 @@
import { Token } from "@lib"
import Token from "@lib/token"
export default async (expiredToken) => {
let regenerationToken = null
@ -10,7 +10,7 @@ export default async (expiredToken) => {
regenerationToken = associatedRegenerationToken.refreshToken
} else {
// create a new regeneration token with the expired token
regenerationToken = await Token.createNewRegenerationToken(expiredToken)
regenerationToken = await Token.createRegenerative(expiredToken)
}
return regenerationToken.refreshToken

View File

@ -3,8 +3,8 @@
"version": "0.58.2",
"main": "dist/index.js",
"scripts": {
"build": "corenode-cli build",
"dev": "cross-env NODE_ENV=development nodemon --ignore dist/ --exec corenode-node ./src/index.js",
"build": "hermes build",
"dev": "cross-env NODE_ENV=development nodemon --ignore dist/ --exec hermes-node ./src/index.js",
"run:prod": "cross-env NODE_ENV=production node ./dist/index.js"
},
"shared": {
@ -21,43 +21,29 @@
},
"license": "MIT",
"dependencies": {
"7zip-min": "^1.4.4",
"@corenode/utils": "0.28.26",
"@foxify/events": "^2.1.0",
"@octokit/rest": "^19.0.7",
"axios": "^1.2.5",
"bcrypt": "^5.1.0",
"busboy": "^1.6.0",
"comty.js": "^0.58.2",
"connect-mongo": "^4.6.0",
"content-range": "^2.0.2",
"corenode": "0.28.26",
"dotenv": "^16.0.3",
"file-api": "^0.10.4",
"form-data": "^4.0.0",
"formidable": "^2.1.1",
"hyper-express": "^6.5.9",
"jsonwebtoken": "^9.0.0",
"linebridge": "0.15.12",
"live-directory": "^3.0.3",
"luxon": "^3.2.1",
"merge-files": "^0.1.2",
"mime-types": "^2.1.35",
"minio": "^7.0.32",
"moment": "^2.29.4",
"moment-timezone": "^0.5.40",
"mongoose": "^6.9.0",
"normalize-url": "^8.0.0",
"p-map": "^6.0.0",
"p-queue": "^7.3.4",
"qs": "^6.11.2",
"redis": "^4.6.6",
"sharp": "^0.31.3",
"split-chunk-merge": "^1.0.0",
"sucrase": "^3.32.0",
"uglify-js": "^3.17.4"
"redis": "^4.6.6"
},
"devDependencies": {
"@corenode/utils": "0.28.26",
"chai": "^4.3.7",
"corenode": "0.28.26",
"cross-env": "^7.0.3",
"mocha": "^10.2.0",
"nodemon": "^2.0.15"

View File

@ -1,3 +1,4 @@
import path from "path"
global.FORCE_ENV = "prod"
require(path.resolve(process.cwd(), "../../shared/lib/api_wrapper"))

View File

@ -19,7 +19,7 @@ export default async function (req, res, next) {
return res.status(401).json({ error: "Unauthorized" })
}
req.session = validation.session
req.session = validation.data
return next()
}

View File

@ -20,7 +20,7 @@ export default async function (req, res, next) {
}
req.sessionToken = auth
req.session = validation.session
req.session = validation.data
return next()
}

View File

@ -23,7 +23,7 @@ export default async (socket, next) => {
return next(new Error(`auth:token_invalid`))
}
const session = validation.session
const session = validation.data
const userData = await global.comty.rest.user.data({
user_id: session.user_id,

View File

@ -19,7 +19,7 @@ export default async function (req, res, next) {
return res.status(401).json({ error: "Unauthorized" })
}
req.session = validation.session
req.session = validation.data
return true
}

View File

@ -4,7 +4,7 @@
"main": "./src/index.js",
"license": "MIT",
"scripts": {
"dev": "corenode-node ./src/index.js"
"dev": "hermes-node ./src/index.js"
},
"dependencies": {
"7zip-min": "^1.4.3",
@ -19,6 +19,6 @@
"pm2": "^5.3.0"
},
"devDependencies": {
"corenode": "^0.28.26"
"@ragestudio/hermes": "^0.1.1"
}
}

View File

@ -56,17 +56,11 @@ async function linkSharedResources(pkgJSON, packagePath) {
}
}
async function initializeEvite() {
async function linkInternalSubmodules(packages) {
const appPath = path.resolve(rootPath, pkgjson._web_app_path)
const evitePath = path.resolve(rootPath, "evite")
console.log("📦 Initializing Evite...")
// console.log(`Intalling Evite dependencies...`)
// await child_process.execSync("yarn install", {
// cwd: evitePath,
// stdio: "inherit",
// })
const linebridePath = path.resolve(rootPath, "linebridge")
console.log(`Linking Evite to app...`)
await child_process.execSync("yarn link", {
@ -79,6 +73,30 @@ async function initializeEvite() {
stdio: "inherit",
})
console.log(`Linking Linebride to servers...`)
await child_process.execSync(`yarn link`, {
cwd: linebridePath,
stdio: "inherit",
})
for await (const packageName of packages) {
const packagePath = path.resolve(packagesPath, packageName)
const packageJsonPath = path.resolve(packagePath, "package.json")
if (!fs.existsSync(packageJsonPath)) {
continue
}
await child_process.execSync(`yarn link "linebridge"`, {
cwd: packagePath,
stdio: "inherit",
})
console.log(`Linking Linebride to package [${packageName}]...`)
}
console.log(`✅ Evite dependencies installed`)
return true
@ -87,18 +105,18 @@ async function initializeEvite() {
async function main() {
console.time("✅ post-install tooks:")
await initializeEvite()
// read dir with absolute paths
let packages = await getPackages()
await linkInternalSubmodules(packages)
console.log("Rebuilding TFJS...")
await child_process.execSync("npm rebuild @tensorflow/tfjs-node --build-from-source &&", {
await child_process.execSync("npm rebuild @tensorflow/tfjs-node --build-from-source", {
cwd: rootPath,
stdio: "inherit",
})
// read dir with absolute paths
let packages = await getPackages()
for (const packageName of packages) {
const packagePath = path.resolve(packagesPath, packageName)

View File

@ -4,9 +4,13 @@ const path = require("path")
const rootPath = process.cwd()
const packagesPath = path.resolve(rootPath, "packages")
const excludedPackages = ["comty.js"]
async function readIgnoredPackages() {
const packages = await fs.promises.readFile(path.resolve(rootPath, ".ignorepackages"), "utf-8").catch(() => "")
function filterPackages(packages, ignore = []) {
return packages.split("\n")
}
async function filterPackages(packages, ignore = []) {
const gitIgnore = fs.readFileSync(path.resolve(rootPath, ".gitignore"), "utf-8")
// create a regex to match all packages that are in the gitignore file
@ -19,11 +23,6 @@ function filterPackages(packages, ignore = []) {
// filter packages that are in the gitignore file
packages = packages.filter((packageName) => {
// filter excluded packages
if (excludedPackages.includes(packageName)) {
return false
}
// filter ignored packages
if (ignore.includes(packageName)) {
return false
@ -46,7 +45,12 @@ function filterPackages(packages, ignore = []) {
async function getPackages({ ignore = [] } = {}) {
let packages = await fs.promises.readdir(packagesPath)
packages = filterPackages(packages, ignore)
const ignoredPackages = await readIgnoredPackages()
packages = filterPackages(packages, [
...ignore,
...ignoredPackages,
])
return packages
}

View File

@ -1,5 +1,4 @@
import axios from "axios"
import FormData from "form-data"
import qs from "qs"
const TIDAL_CLIENT_ID = process.env.TIDAL_CLIENT_ID

View File

@ -63,7 +63,7 @@ global.toBoolean = (value) => {
}
async function injectEnvFromInfisical() {
const envMode = global.isProduction ? "prod" : "dev"
const envMode = global.FORCE_ENV ?? global.isProduction ? "prod" : "dev"
console.log(`🔑 Injecting env variables from INFISICAL in [${envMode}] mode...`)
@ -72,7 +72,7 @@ async function injectEnvFromInfisical() {
})
const secrets = await client.getAllSecrets({
environment: global.isProduction ? "prod" : "dev",
environment: envMode,
attachToProcessEnv: false,
})